/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
24 #include <linux/firmware.h>
27 #include "radeon_asic.h"
28 #include "radeon_audio.h"
29 #include <uapi_drm/radeon_drm.h>
30 #include "evergreend.h"
33 #include "evergreen_reg.h"
34 #include "evergreen_blit_shaders.h"
35 #include "radeon_ucode.h"
/*
 * Indirect registers accessor
 */
40 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
45 spin_lock_irqsave(&rdev->cg_idx_lock, flags);
46 WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
47 r = RREG32(EVERGREEN_CG_IND_DATA);
48 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
52 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
56 spin_lock_irqsave(&rdev->cg_idx_lock, flags);
57 WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
58 WREG32(EVERGREEN_CG_IND_DATA, (v));
59 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
62 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
67 spin_lock_irqsave(&rdev->pif_idx_lock, flags);
68 WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
69 r = RREG32(EVERGREEN_PIF_PHY0_DATA);
70 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
74 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
78 spin_lock_irqsave(&rdev->pif_idx_lock, flags);
79 WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
80 WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
81 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
84 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
89 spin_lock_irqsave(&rdev->pif_idx_lock, flags);
90 WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
91 r = RREG32(EVERGREEN_PIF_PHY1_DATA);
92 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
96 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
100 spin_lock_irqsave(&rdev->pif_idx_lock, flags);
101 WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
102 WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
103 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
106 static const u32 crtc_offsets[6] =
108 EVERGREEN_CRTC0_REGISTER_OFFSET,
109 EVERGREEN_CRTC1_REGISTER_OFFSET,
110 EVERGREEN_CRTC2_REGISTER_OFFSET,
111 EVERGREEN_CRTC3_REGISTER_OFFSET,
112 EVERGREEN_CRTC4_REGISTER_OFFSET,
113 EVERGREEN_CRTC5_REGISTER_OFFSET
116 #include "clearstate_evergreen.h"
118 static const u32 sumo_rlc_save_restore_register_list[] =
203 static void evergreen_gpu_init(struct radeon_device *rdev);
205 static const u32 evergreen_golden_registers[] =
207 0x3f90, 0xffff0000, 0xff000000,
208 0x9148, 0xffff0000, 0xff000000,
209 0x3f94, 0xffff0000, 0xff000000,
210 0x914c, 0xffff0000, 0xff000000,
211 0x9b7c, 0xffffffff, 0x00000000,
212 0x8a14, 0xffffffff, 0x00000007,
213 0x8b10, 0xffffffff, 0x00000000,
214 0x960c, 0xffffffff, 0x54763210,
215 0x88c4, 0xffffffff, 0x000000c2,
216 0x88d4, 0xffffffff, 0x00000010,
217 0x8974, 0xffffffff, 0x00000000,
218 0xc78, 0x00000080, 0x00000080,
219 0x5eb4, 0xffffffff, 0x00000002,
220 0x5e78, 0xffffffff, 0x001000f0,
221 0x6104, 0x01000300, 0x00000000,
222 0x5bc0, 0x00300000, 0x00000000,
223 0x7030, 0xffffffff, 0x00000011,
224 0x7c30, 0xffffffff, 0x00000011,
225 0x10830, 0xffffffff, 0x00000011,
226 0x11430, 0xffffffff, 0x00000011,
227 0x12030, 0xffffffff, 0x00000011,
228 0x12c30, 0xffffffff, 0x00000011,
229 0xd02c, 0xffffffff, 0x08421000,
230 0x240c, 0xffffffff, 0x00000380,
231 0x8b24, 0xffffffff, 0x00ff0fff,
232 0x28a4c, 0x06000000, 0x06000000,
233 0x10c, 0x00000001, 0x00000001,
234 0x8d00, 0xffffffff, 0x100e4848,
235 0x8d04, 0xffffffff, 0x00164745,
236 0x8c00, 0xffffffff, 0xe4000003,
237 0x8c04, 0xffffffff, 0x40600060,
238 0x8c08, 0xffffffff, 0x001c001c,
239 0x8cf0, 0xffffffff, 0x08e00620,
240 0x8c20, 0xffffffff, 0x00800080,
241 0x8c24, 0xffffffff, 0x00800080,
242 0x8c18, 0xffffffff, 0x20202078,
243 0x8c1c, 0xffffffff, 0x00001010,
244 0x28350, 0xffffffff, 0x00000000,
245 0xa008, 0xffffffff, 0x00010000,
246 0x5c4, 0xffffffff, 0x00000001,
247 0x9508, 0xffffffff, 0x00000002,
248 0x913c, 0x0000000f, 0x0000000a
251 static const u32 evergreen_golden_registers2[] =
253 0x2f4c, 0xffffffff, 0x00000000,
254 0x54f4, 0xffffffff, 0x00000000,
255 0x54f0, 0xffffffff, 0x00000000,
256 0x5498, 0xffffffff, 0x00000000,
257 0x549c, 0xffffffff, 0x00000000,
258 0x5494, 0xffffffff, 0x00000000,
259 0x53cc, 0xffffffff, 0x00000000,
260 0x53c8, 0xffffffff, 0x00000000,
261 0x53c4, 0xffffffff, 0x00000000,
262 0x53c0, 0xffffffff, 0x00000000,
263 0x53bc, 0xffffffff, 0x00000000,
264 0x53b8, 0xffffffff, 0x00000000,
265 0x53b4, 0xffffffff, 0x00000000,
266 0x53b0, 0xffffffff, 0x00000000
269 static const u32 cypress_mgcg_init[] =
271 0x802c, 0xffffffff, 0xc0000000,
272 0x5448, 0xffffffff, 0x00000100,
273 0x55e4, 0xffffffff, 0x00000100,
274 0x160c, 0xffffffff, 0x00000100,
275 0x5644, 0xffffffff, 0x00000100,
276 0xc164, 0xffffffff, 0x00000100,
277 0x8a18, 0xffffffff, 0x00000100,
278 0x897c, 0xffffffff, 0x06000100,
279 0x8b28, 0xffffffff, 0x00000100,
280 0x9144, 0xffffffff, 0x00000100,
281 0x9a60, 0xffffffff, 0x00000100,
282 0x9868, 0xffffffff, 0x00000100,
283 0x8d58, 0xffffffff, 0x00000100,
284 0x9510, 0xffffffff, 0x00000100,
285 0x949c, 0xffffffff, 0x00000100,
286 0x9654, 0xffffffff, 0x00000100,
287 0x9030, 0xffffffff, 0x00000100,
288 0x9034, 0xffffffff, 0x00000100,
289 0x9038, 0xffffffff, 0x00000100,
290 0x903c, 0xffffffff, 0x00000100,
291 0x9040, 0xffffffff, 0x00000100,
292 0xa200, 0xffffffff, 0x00000100,
293 0xa204, 0xffffffff, 0x00000100,
294 0xa208, 0xffffffff, 0x00000100,
295 0xa20c, 0xffffffff, 0x00000100,
296 0x971c, 0xffffffff, 0x00000100,
297 0x977c, 0xffffffff, 0x00000100,
298 0x3f80, 0xffffffff, 0x00000100,
299 0xa210, 0xffffffff, 0x00000100,
300 0xa214, 0xffffffff, 0x00000100,
301 0x4d8, 0xffffffff, 0x00000100,
302 0x9784, 0xffffffff, 0x00000100,
303 0x9698, 0xffffffff, 0x00000100,
304 0x4d4, 0xffffffff, 0x00000200,
305 0x30cc, 0xffffffff, 0x00000100,
306 0xd0c0, 0xffffffff, 0xff000100,
307 0x802c, 0xffffffff, 0x40000000,
308 0x915c, 0xffffffff, 0x00010000,
309 0x9160, 0xffffffff, 0x00030002,
310 0x9178, 0xffffffff, 0x00070000,
311 0x917c, 0xffffffff, 0x00030002,
312 0x9180, 0xffffffff, 0x00050004,
313 0x918c, 0xffffffff, 0x00010006,
314 0x9190, 0xffffffff, 0x00090008,
315 0x9194, 0xffffffff, 0x00070000,
316 0x9198, 0xffffffff, 0x00030002,
317 0x919c, 0xffffffff, 0x00050004,
318 0x91a8, 0xffffffff, 0x00010006,
319 0x91ac, 0xffffffff, 0x00090008,
320 0x91b0, 0xffffffff, 0x00070000,
321 0x91b4, 0xffffffff, 0x00030002,
322 0x91b8, 0xffffffff, 0x00050004,
323 0x91c4, 0xffffffff, 0x00010006,
324 0x91c8, 0xffffffff, 0x00090008,
325 0x91cc, 0xffffffff, 0x00070000,
326 0x91d0, 0xffffffff, 0x00030002,
327 0x91d4, 0xffffffff, 0x00050004,
328 0x91e0, 0xffffffff, 0x00010006,
329 0x91e4, 0xffffffff, 0x00090008,
330 0x91e8, 0xffffffff, 0x00000000,
331 0x91ec, 0xffffffff, 0x00070000,
332 0x91f0, 0xffffffff, 0x00030002,
333 0x91f4, 0xffffffff, 0x00050004,
334 0x9200, 0xffffffff, 0x00010006,
335 0x9204, 0xffffffff, 0x00090008,
336 0x9208, 0xffffffff, 0x00070000,
337 0x920c, 0xffffffff, 0x00030002,
338 0x9210, 0xffffffff, 0x00050004,
339 0x921c, 0xffffffff, 0x00010006,
340 0x9220, 0xffffffff, 0x00090008,
341 0x9224, 0xffffffff, 0x00070000,
342 0x9228, 0xffffffff, 0x00030002,
343 0x922c, 0xffffffff, 0x00050004,
344 0x9238, 0xffffffff, 0x00010006,
345 0x923c, 0xffffffff, 0x00090008,
346 0x9240, 0xffffffff, 0x00070000,
347 0x9244, 0xffffffff, 0x00030002,
348 0x9248, 0xffffffff, 0x00050004,
349 0x9254, 0xffffffff, 0x00010006,
350 0x9258, 0xffffffff, 0x00090008,
351 0x925c, 0xffffffff, 0x00070000,
352 0x9260, 0xffffffff, 0x00030002,
353 0x9264, 0xffffffff, 0x00050004,
354 0x9270, 0xffffffff, 0x00010006,
355 0x9274, 0xffffffff, 0x00090008,
356 0x9278, 0xffffffff, 0x00070000,
357 0x927c, 0xffffffff, 0x00030002,
358 0x9280, 0xffffffff, 0x00050004,
359 0x928c, 0xffffffff, 0x00010006,
360 0x9290, 0xffffffff, 0x00090008,
361 0x9294, 0xffffffff, 0x00000000,
362 0x929c, 0xffffffff, 0x00000001,
363 0x802c, 0xffffffff, 0x40010000,
364 0x915c, 0xffffffff, 0x00010000,
365 0x9160, 0xffffffff, 0x00030002,
366 0x9178, 0xffffffff, 0x00070000,
367 0x917c, 0xffffffff, 0x00030002,
368 0x9180, 0xffffffff, 0x00050004,
369 0x918c, 0xffffffff, 0x00010006,
370 0x9190, 0xffffffff, 0x00090008,
371 0x9194, 0xffffffff, 0x00070000,
372 0x9198, 0xffffffff, 0x00030002,
373 0x919c, 0xffffffff, 0x00050004,
374 0x91a8, 0xffffffff, 0x00010006,
375 0x91ac, 0xffffffff, 0x00090008,
376 0x91b0, 0xffffffff, 0x00070000,
377 0x91b4, 0xffffffff, 0x00030002,
378 0x91b8, 0xffffffff, 0x00050004,
379 0x91c4, 0xffffffff, 0x00010006,
380 0x91c8, 0xffffffff, 0x00090008,
381 0x91cc, 0xffffffff, 0x00070000,
382 0x91d0, 0xffffffff, 0x00030002,
383 0x91d4, 0xffffffff, 0x00050004,
384 0x91e0, 0xffffffff, 0x00010006,
385 0x91e4, 0xffffffff, 0x00090008,
386 0x91e8, 0xffffffff, 0x00000000,
387 0x91ec, 0xffffffff, 0x00070000,
388 0x91f0, 0xffffffff, 0x00030002,
389 0x91f4, 0xffffffff, 0x00050004,
390 0x9200, 0xffffffff, 0x00010006,
391 0x9204, 0xffffffff, 0x00090008,
392 0x9208, 0xffffffff, 0x00070000,
393 0x920c, 0xffffffff, 0x00030002,
394 0x9210, 0xffffffff, 0x00050004,
395 0x921c, 0xffffffff, 0x00010006,
396 0x9220, 0xffffffff, 0x00090008,
397 0x9224, 0xffffffff, 0x00070000,
398 0x9228, 0xffffffff, 0x00030002,
399 0x922c, 0xffffffff, 0x00050004,
400 0x9238, 0xffffffff, 0x00010006,
401 0x923c, 0xffffffff, 0x00090008,
402 0x9240, 0xffffffff, 0x00070000,
403 0x9244, 0xffffffff, 0x00030002,
404 0x9248, 0xffffffff, 0x00050004,
405 0x9254, 0xffffffff, 0x00010006,
406 0x9258, 0xffffffff, 0x00090008,
407 0x925c, 0xffffffff, 0x00070000,
408 0x9260, 0xffffffff, 0x00030002,
409 0x9264, 0xffffffff, 0x00050004,
410 0x9270, 0xffffffff, 0x00010006,
411 0x9274, 0xffffffff, 0x00090008,
412 0x9278, 0xffffffff, 0x00070000,
413 0x927c, 0xffffffff, 0x00030002,
414 0x9280, 0xffffffff, 0x00050004,
415 0x928c, 0xffffffff, 0x00010006,
416 0x9290, 0xffffffff, 0x00090008,
417 0x9294, 0xffffffff, 0x00000000,
418 0x929c, 0xffffffff, 0x00000001,
419 0x802c, 0xffffffff, 0xc0000000
422 static const u32 redwood_mgcg_init[] =
424 0x802c, 0xffffffff, 0xc0000000,
425 0x5448, 0xffffffff, 0x00000100,
426 0x55e4, 0xffffffff, 0x00000100,
427 0x160c, 0xffffffff, 0x00000100,
428 0x5644, 0xffffffff, 0x00000100,
429 0xc164, 0xffffffff, 0x00000100,
430 0x8a18, 0xffffffff, 0x00000100,
431 0x897c, 0xffffffff, 0x06000100,
432 0x8b28, 0xffffffff, 0x00000100,
433 0x9144, 0xffffffff, 0x00000100,
434 0x9a60, 0xffffffff, 0x00000100,
435 0x9868, 0xffffffff, 0x00000100,
436 0x8d58, 0xffffffff, 0x00000100,
437 0x9510, 0xffffffff, 0x00000100,
438 0x949c, 0xffffffff, 0x00000100,
439 0x9654, 0xffffffff, 0x00000100,
440 0x9030, 0xffffffff, 0x00000100,
441 0x9034, 0xffffffff, 0x00000100,
442 0x9038, 0xffffffff, 0x00000100,
443 0x903c, 0xffffffff, 0x00000100,
444 0x9040, 0xffffffff, 0x00000100,
445 0xa200, 0xffffffff, 0x00000100,
446 0xa204, 0xffffffff, 0x00000100,
447 0xa208, 0xffffffff, 0x00000100,
448 0xa20c, 0xffffffff, 0x00000100,
449 0x971c, 0xffffffff, 0x00000100,
450 0x977c, 0xffffffff, 0x00000100,
451 0x3f80, 0xffffffff, 0x00000100,
452 0xa210, 0xffffffff, 0x00000100,
453 0xa214, 0xffffffff, 0x00000100,
454 0x4d8, 0xffffffff, 0x00000100,
455 0x9784, 0xffffffff, 0x00000100,
456 0x9698, 0xffffffff, 0x00000100,
457 0x4d4, 0xffffffff, 0x00000200,
458 0x30cc, 0xffffffff, 0x00000100,
459 0xd0c0, 0xffffffff, 0xff000100,
460 0x802c, 0xffffffff, 0x40000000,
461 0x915c, 0xffffffff, 0x00010000,
462 0x9160, 0xffffffff, 0x00030002,
463 0x9178, 0xffffffff, 0x00070000,
464 0x917c, 0xffffffff, 0x00030002,
465 0x9180, 0xffffffff, 0x00050004,
466 0x918c, 0xffffffff, 0x00010006,
467 0x9190, 0xffffffff, 0x00090008,
468 0x9194, 0xffffffff, 0x00070000,
469 0x9198, 0xffffffff, 0x00030002,
470 0x919c, 0xffffffff, 0x00050004,
471 0x91a8, 0xffffffff, 0x00010006,
472 0x91ac, 0xffffffff, 0x00090008,
473 0x91b0, 0xffffffff, 0x00070000,
474 0x91b4, 0xffffffff, 0x00030002,
475 0x91b8, 0xffffffff, 0x00050004,
476 0x91c4, 0xffffffff, 0x00010006,
477 0x91c8, 0xffffffff, 0x00090008,
478 0x91cc, 0xffffffff, 0x00070000,
479 0x91d0, 0xffffffff, 0x00030002,
480 0x91d4, 0xffffffff, 0x00050004,
481 0x91e0, 0xffffffff, 0x00010006,
482 0x91e4, 0xffffffff, 0x00090008,
483 0x91e8, 0xffffffff, 0x00000000,
484 0x91ec, 0xffffffff, 0x00070000,
485 0x91f0, 0xffffffff, 0x00030002,
486 0x91f4, 0xffffffff, 0x00050004,
487 0x9200, 0xffffffff, 0x00010006,
488 0x9204, 0xffffffff, 0x00090008,
489 0x9294, 0xffffffff, 0x00000000,
490 0x929c, 0xffffffff, 0x00000001,
491 0x802c, 0xffffffff, 0xc0000000
494 static const u32 cedar_golden_registers[] =
496 0x3f90, 0xffff0000, 0xff000000,
497 0x9148, 0xffff0000, 0xff000000,
498 0x3f94, 0xffff0000, 0xff000000,
499 0x914c, 0xffff0000, 0xff000000,
500 0x9b7c, 0xffffffff, 0x00000000,
501 0x8a14, 0xffffffff, 0x00000007,
502 0x8b10, 0xffffffff, 0x00000000,
503 0x960c, 0xffffffff, 0x54763210,
504 0x88c4, 0xffffffff, 0x000000c2,
505 0x88d4, 0xffffffff, 0x00000000,
506 0x8974, 0xffffffff, 0x00000000,
507 0xc78, 0x00000080, 0x00000080,
508 0x5eb4, 0xffffffff, 0x00000002,
509 0x5e78, 0xffffffff, 0x001000f0,
510 0x6104, 0x01000300, 0x00000000,
511 0x5bc0, 0x00300000, 0x00000000,
512 0x7030, 0xffffffff, 0x00000011,
513 0x7c30, 0xffffffff, 0x00000011,
514 0x10830, 0xffffffff, 0x00000011,
515 0x11430, 0xffffffff, 0x00000011,
516 0xd02c, 0xffffffff, 0x08421000,
517 0x240c, 0xffffffff, 0x00000380,
518 0x8b24, 0xffffffff, 0x00ff0fff,
519 0x28a4c, 0x06000000, 0x06000000,
520 0x10c, 0x00000001, 0x00000001,
521 0x8d00, 0xffffffff, 0x100e4848,
522 0x8d04, 0xffffffff, 0x00164745,
523 0x8c00, 0xffffffff, 0xe4000003,
524 0x8c04, 0xffffffff, 0x40600060,
525 0x8c08, 0xffffffff, 0x001c001c,
526 0x8cf0, 0xffffffff, 0x08e00410,
527 0x8c20, 0xffffffff, 0x00800080,
528 0x8c24, 0xffffffff, 0x00800080,
529 0x8c18, 0xffffffff, 0x20202078,
530 0x8c1c, 0xffffffff, 0x00001010,
531 0x28350, 0xffffffff, 0x00000000,
532 0xa008, 0xffffffff, 0x00010000,
533 0x5c4, 0xffffffff, 0x00000001,
534 0x9508, 0xffffffff, 0x00000002
537 static const u32 cedar_mgcg_init[] =
539 0x802c, 0xffffffff, 0xc0000000,
540 0x5448, 0xffffffff, 0x00000100,
541 0x55e4, 0xffffffff, 0x00000100,
542 0x160c, 0xffffffff, 0x00000100,
543 0x5644, 0xffffffff, 0x00000100,
544 0xc164, 0xffffffff, 0x00000100,
545 0x8a18, 0xffffffff, 0x00000100,
546 0x897c, 0xffffffff, 0x06000100,
547 0x8b28, 0xffffffff, 0x00000100,
548 0x9144, 0xffffffff, 0x00000100,
549 0x9a60, 0xffffffff, 0x00000100,
550 0x9868, 0xffffffff, 0x00000100,
551 0x8d58, 0xffffffff, 0x00000100,
552 0x9510, 0xffffffff, 0x00000100,
553 0x949c, 0xffffffff, 0x00000100,
554 0x9654, 0xffffffff, 0x00000100,
555 0x9030, 0xffffffff, 0x00000100,
556 0x9034, 0xffffffff, 0x00000100,
557 0x9038, 0xffffffff, 0x00000100,
558 0x903c, 0xffffffff, 0x00000100,
559 0x9040, 0xffffffff, 0x00000100,
560 0xa200, 0xffffffff, 0x00000100,
561 0xa204, 0xffffffff, 0x00000100,
562 0xa208, 0xffffffff, 0x00000100,
563 0xa20c, 0xffffffff, 0x00000100,
564 0x971c, 0xffffffff, 0x00000100,
565 0x977c, 0xffffffff, 0x00000100,
566 0x3f80, 0xffffffff, 0x00000100,
567 0xa210, 0xffffffff, 0x00000100,
568 0xa214, 0xffffffff, 0x00000100,
569 0x4d8, 0xffffffff, 0x00000100,
570 0x9784, 0xffffffff, 0x00000100,
571 0x9698, 0xffffffff, 0x00000100,
572 0x4d4, 0xffffffff, 0x00000200,
573 0x30cc, 0xffffffff, 0x00000100,
574 0xd0c0, 0xffffffff, 0xff000100,
575 0x802c, 0xffffffff, 0x40000000,
576 0x915c, 0xffffffff, 0x00010000,
577 0x9178, 0xffffffff, 0x00050000,
578 0x917c, 0xffffffff, 0x00030002,
579 0x918c, 0xffffffff, 0x00010004,
580 0x9190, 0xffffffff, 0x00070006,
581 0x9194, 0xffffffff, 0x00050000,
582 0x9198, 0xffffffff, 0x00030002,
583 0x91a8, 0xffffffff, 0x00010004,
584 0x91ac, 0xffffffff, 0x00070006,
585 0x91e8, 0xffffffff, 0x00000000,
586 0x9294, 0xffffffff, 0x00000000,
587 0x929c, 0xffffffff, 0x00000001,
588 0x802c, 0xffffffff, 0xc0000000
591 static const u32 juniper_mgcg_init[] =
593 0x802c, 0xffffffff, 0xc0000000,
594 0x5448, 0xffffffff, 0x00000100,
595 0x55e4, 0xffffffff, 0x00000100,
596 0x160c, 0xffffffff, 0x00000100,
597 0x5644, 0xffffffff, 0x00000100,
598 0xc164, 0xffffffff, 0x00000100,
599 0x8a18, 0xffffffff, 0x00000100,
600 0x897c, 0xffffffff, 0x06000100,
601 0x8b28, 0xffffffff, 0x00000100,
602 0x9144, 0xffffffff, 0x00000100,
603 0x9a60, 0xffffffff, 0x00000100,
604 0x9868, 0xffffffff, 0x00000100,
605 0x8d58, 0xffffffff, 0x00000100,
606 0x9510, 0xffffffff, 0x00000100,
607 0x949c, 0xffffffff, 0x00000100,
608 0x9654, 0xffffffff, 0x00000100,
609 0x9030, 0xffffffff, 0x00000100,
610 0x9034, 0xffffffff, 0x00000100,
611 0x9038, 0xffffffff, 0x00000100,
612 0x903c, 0xffffffff, 0x00000100,
613 0x9040, 0xffffffff, 0x00000100,
614 0xa200, 0xffffffff, 0x00000100,
615 0xa204, 0xffffffff, 0x00000100,
616 0xa208, 0xffffffff, 0x00000100,
617 0xa20c, 0xffffffff, 0x00000100,
618 0x971c, 0xffffffff, 0x00000100,
619 0xd0c0, 0xffffffff, 0xff000100,
620 0x802c, 0xffffffff, 0x40000000,
621 0x915c, 0xffffffff, 0x00010000,
622 0x9160, 0xffffffff, 0x00030002,
623 0x9178, 0xffffffff, 0x00070000,
624 0x917c, 0xffffffff, 0x00030002,
625 0x9180, 0xffffffff, 0x00050004,
626 0x918c, 0xffffffff, 0x00010006,
627 0x9190, 0xffffffff, 0x00090008,
628 0x9194, 0xffffffff, 0x00070000,
629 0x9198, 0xffffffff, 0x00030002,
630 0x919c, 0xffffffff, 0x00050004,
631 0x91a8, 0xffffffff, 0x00010006,
632 0x91ac, 0xffffffff, 0x00090008,
633 0x91b0, 0xffffffff, 0x00070000,
634 0x91b4, 0xffffffff, 0x00030002,
635 0x91b8, 0xffffffff, 0x00050004,
636 0x91c4, 0xffffffff, 0x00010006,
637 0x91c8, 0xffffffff, 0x00090008,
638 0x91cc, 0xffffffff, 0x00070000,
639 0x91d0, 0xffffffff, 0x00030002,
640 0x91d4, 0xffffffff, 0x00050004,
641 0x91e0, 0xffffffff, 0x00010006,
642 0x91e4, 0xffffffff, 0x00090008,
643 0x91e8, 0xffffffff, 0x00000000,
644 0x91ec, 0xffffffff, 0x00070000,
645 0x91f0, 0xffffffff, 0x00030002,
646 0x91f4, 0xffffffff, 0x00050004,
647 0x9200, 0xffffffff, 0x00010006,
648 0x9204, 0xffffffff, 0x00090008,
649 0x9208, 0xffffffff, 0x00070000,
650 0x920c, 0xffffffff, 0x00030002,
651 0x9210, 0xffffffff, 0x00050004,
652 0x921c, 0xffffffff, 0x00010006,
653 0x9220, 0xffffffff, 0x00090008,
654 0x9224, 0xffffffff, 0x00070000,
655 0x9228, 0xffffffff, 0x00030002,
656 0x922c, 0xffffffff, 0x00050004,
657 0x9238, 0xffffffff, 0x00010006,
658 0x923c, 0xffffffff, 0x00090008,
659 0x9240, 0xffffffff, 0x00070000,
660 0x9244, 0xffffffff, 0x00030002,
661 0x9248, 0xffffffff, 0x00050004,
662 0x9254, 0xffffffff, 0x00010006,
663 0x9258, 0xffffffff, 0x00090008,
664 0x925c, 0xffffffff, 0x00070000,
665 0x9260, 0xffffffff, 0x00030002,
666 0x9264, 0xffffffff, 0x00050004,
667 0x9270, 0xffffffff, 0x00010006,
668 0x9274, 0xffffffff, 0x00090008,
669 0x9278, 0xffffffff, 0x00070000,
670 0x927c, 0xffffffff, 0x00030002,
671 0x9280, 0xffffffff, 0x00050004,
672 0x928c, 0xffffffff, 0x00010006,
673 0x9290, 0xffffffff, 0x00090008,
674 0x9294, 0xffffffff, 0x00000000,
675 0x929c, 0xffffffff, 0x00000001,
676 0x802c, 0xffffffff, 0xc0000000,
677 0x977c, 0xffffffff, 0x00000100,
678 0x3f80, 0xffffffff, 0x00000100,
679 0xa210, 0xffffffff, 0x00000100,
680 0xa214, 0xffffffff, 0x00000100,
681 0x4d8, 0xffffffff, 0x00000100,
682 0x9784, 0xffffffff, 0x00000100,
683 0x9698, 0xffffffff, 0x00000100,
684 0x4d4, 0xffffffff, 0x00000200,
685 0x30cc, 0xffffffff, 0x00000100,
686 0x802c, 0xffffffff, 0xc0000000
689 static const u32 supersumo_golden_registers[] =
691 0x5eb4, 0xffffffff, 0x00000002,
692 0x5c4, 0xffffffff, 0x00000001,
693 0x7030, 0xffffffff, 0x00000011,
694 0x7c30, 0xffffffff, 0x00000011,
695 0x6104, 0x01000300, 0x00000000,
696 0x5bc0, 0x00300000, 0x00000000,
697 0x8c04, 0xffffffff, 0x40600060,
698 0x8c08, 0xffffffff, 0x001c001c,
699 0x8c20, 0xffffffff, 0x00800080,
700 0x8c24, 0xffffffff, 0x00800080,
701 0x8c18, 0xffffffff, 0x20202078,
702 0x8c1c, 0xffffffff, 0x00001010,
703 0x918c, 0xffffffff, 0x00010006,
704 0x91a8, 0xffffffff, 0x00010006,
705 0x91c4, 0xffffffff, 0x00010006,
706 0x91e0, 0xffffffff, 0x00010006,
707 0x9200, 0xffffffff, 0x00010006,
708 0x9150, 0xffffffff, 0x6e944040,
709 0x917c, 0xffffffff, 0x00030002,
710 0x9180, 0xffffffff, 0x00050004,
711 0x9198, 0xffffffff, 0x00030002,
712 0x919c, 0xffffffff, 0x00050004,
713 0x91b4, 0xffffffff, 0x00030002,
714 0x91b8, 0xffffffff, 0x00050004,
715 0x91d0, 0xffffffff, 0x00030002,
716 0x91d4, 0xffffffff, 0x00050004,
717 0x91f0, 0xffffffff, 0x00030002,
718 0x91f4, 0xffffffff, 0x00050004,
719 0x915c, 0xffffffff, 0x00010000,
720 0x9160, 0xffffffff, 0x00030002,
721 0x3f90, 0xffff0000, 0xff000000,
722 0x9178, 0xffffffff, 0x00070000,
723 0x9194, 0xffffffff, 0x00070000,
724 0x91b0, 0xffffffff, 0x00070000,
725 0x91cc, 0xffffffff, 0x00070000,
726 0x91ec, 0xffffffff, 0x00070000,
727 0x9148, 0xffff0000, 0xff000000,
728 0x9190, 0xffffffff, 0x00090008,
729 0x91ac, 0xffffffff, 0x00090008,
730 0x91c8, 0xffffffff, 0x00090008,
731 0x91e4, 0xffffffff, 0x00090008,
732 0x9204, 0xffffffff, 0x00090008,
733 0x3f94, 0xffff0000, 0xff000000,
734 0x914c, 0xffff0000, 0xff000000,
735 0x929c, 0xffffffff, 0x00000001,
736 0x8a18, 0xffffffff, 0x00000100,
737 0x8b28, 0xffffffff, 0x00000100,
738 0x9144, 0xffffffff, 0x00000100,
739 0x5644, 0xffffffff, 0x00000100,
740 0x9b7c, 0xffffffff, 0x00000000,
741 0x8030, 0xffffffff, 0x0000100a,
742 0x8a14, 0xffffffff, 0x00000007,
743 0x8b24, 0xffffffff, 0x00ff0fff,
744 0x8b10, 0xffffffff, 0x00000000,
745 0x28a4c, 0x06000000, 0x06000000,
746 0x4d8, 0xffffffff, 0x00000100,
747 0x913c, 0xffff000f, 0x0100000a,
748 0x960c, 0xffffffff, 0x54763210,
749 0x88c4, 0xffffffff, 0x000000c2,
750 0x88d4, 0xffffffff, 0x00000010,
751 0x8974, 0xffffffff, 0x00000000,
752 0xc78, 0x00000080, 0x00000080,
753 0x5e78, 0xffffffff, 0x001000f0,
754 0xd02c, 0xffffffff, 0x08421000,
755 0xa008, 0xffffffff, 0x00010000,
756 0x8d00, 0xffffffff, 0x100e4848,
757 0x8d04, 0xffffffff, 0x00164745,
758 0x8c00, 0xffffffff, 0xe4000003,
759 0x8cf0, 0x1fffffff, 0x08e00620,
760 0x28350, 0xffffffff, 0x00000000,
761 0x9508, 0xffffffff, 0x00000002
764 static const u32 sumo_golden_registers[] =
766 0x900c, 0x00ffffff, 0x0017071f,
767 0x8c18, 0xffffffff, 0x10101060,
768 0x8c1c, 0xffffffff, 0x00001010,
769 0x8c30, 0x0000000f, 0x00000005,
770 0x9688, 0x0000000f, 0x00000007
773 static const u32 wrestler_golden_registers[] =
775 0x5eb4, 0xffffffff, 0x00000002,
776 0x5c4, 0xffffffff, 0x00000001,
777 0x7030, 0xffffffff, 0x00000011,
778 0x7c30, 0xffffffff, 0x00000011,
779 0x6104, 0x01000300, 0x00000000,
780 0x5bc0, 0x00300000, 0x00000000,
781 0x918c, 0xffffffff, 0x00010006,
782 0x91a8, 0xffffffff, 0x00010006,
783 0x9150, 0xffffffff, 0x6e944040,
784 0x917c, 0xffffffff, 0x00030002,
785 0x9198, 0xffffffff, 0x00030002,
786 0x915c, 0xffffffff, 0x00010000,
787 0x3f90, 0xffff0000, 0xff000000,
788 0x9178, 0xffffffff, 0x00070000,
789 0x9194, 0xffffffff, 0x00070000,
790 0x9148, 0xffff0000, 0xff000000,
791 0x9190, 0xffffffff, 0x00090008,
792 0x91ac, 0xffffffff, 0x00090008,
793 0x3f94, 0xffff0000, 0xff000000,
794 0x914c, 0xffff0000, 0xff000000,
795 0x929c, 0xffffffff, 0x00000001,
796 0x8a18, 0xffffffff, 0x00000100,
797 0x8b28, 0xffffffff, 0x00000100,
798 0x9144, 0xffffffff, 0x00000100,
799 0x9b7c, 0xffffffff, 0x00000000,
800 0x8030, 0xffffffff, 0x0000100a,
801 0x8a14, 0xffffffff, 0x00000001,
802 0x8b24, 0xffffffff, 0x00ff0fff,
803 0x8b10, 0xffffffff, 0x00000000,
804 0x28a4c, 0x06000000, 0x06000000,
805 0x4d8, 0xffffffff, 0x00000100,
806 0x913c, 0xffff000f, 0x0100000a,
807 0x960c, 0xffffffff, 0x54763210,
808 0x88c4, 0xffffffff, 0x000000c2,
809 0x88d4, 0xffffffff, 0x00000010,
810 0x8974, 0xffffffff, 0x00000000,
811 0xc78, 0x00000080, 0x00000080,
812 0x5e78, 0xffffffff, 0x001000f0,
813 0xd02c, 0xffffffff, 0x08421000,
814 0xa008, 0xffffffff, 0x00010000,
815 0x8d00, 0xffffffff, 0x100e4848,
816 0x8d04, 0xffffffff, 0x00164745,
817 0x8c00, 0xffffffff, 0xe4000003,
818 0x8cf0, 0x1fffffff, 0x08e00410,
819 0x28350, 0xffffffff, 0x00000000,
820 0x9508, 0xffffffff, 0x00000002,
821 0x900c, 0xffffffff, 0x0017071f,
822 0x8c18, 0xffffffff, 0x10101060,
823 0x8c1c, 0xffffffff, 0x00001010
826 static const u32 barts_golden_registers[] =
828 0x5eb4, 0xffffffff, 0x00000002,
829 0x5e78, 0x8f311ff1, 0x001000f0,
830 0x3f90, 0xffff0000, 0xff000000,
831 0x9148, 0xffff0000, 0xff000000,
832 0x3f94, 0xffff0000, 0xff000000,
833 0x914c, 0xffff0000, 0xff000000,
834 0xc78, 0x00000080, 0x00000080,
835 0xbd4, 0x70073777, 0x00010001,
836 0xd02c, 0xbfffff1f, 0x08421000,
837 0xd0b8, 0x03773777, 0x02011003,
838 0x5bc0, 0x00200000, 0x50100000,
839 0x98f8, 0x33773777, 0x02011003,
840 0x98fc, 0xffffffff, 0x76543210,
841 0x7030, 0x31000311, 0x00000011,
842 0x2f48, 0x00000007, 0x02011003,
843 0x6b28, 0x00000010, 0x00000012,
844 0x7728, 0x00000010, 0x00000012,
845 0x10328, 0x00000010, 0x00000012,
846 0x10f28, 0x00000010, 0x00000012,
847 0x11b28, 0x00000010, 0x00000012,
848 0x12728, 0x00000010, 0x00000012,
849 0x240c, 0x000007ff, 0x00000380,
850 0x8a14, 0xf000001f, 0x00000007,
851 0x8b24, 0x3fff3fff, 0x00ff0fff,
852 0x8b10, 0x0000ff0f, 0x00000000,
853 0x28a4c, 0x07ffffff, 0x06000000,
854 0x10c, 0x00000001, 0x00010003,
855 0xa02c, 0xffffffff, 0x0000009b,
856 0x913c, 0x0000000f, 0x0100000a,
857 0x8d00, 0xffff7f7f, 0x100e4848,
858 0x8d04, 0x00ffffff, 0x00164745,
859 0x8c00, 0xfffc0003, 0xe4000003,
860 0x8c04, 0xf8ff00ff, 0x40600060,
861 0x8c08, 0x00ff00ff, 0x001c001c,
862 0x8cf0, 0x1fff1fff, 0x08e00620,
863 0x8c20, 0x0fff0fff, 0x00800080,
864 0x8c24, 0x0fff0fff, 0x00800080,
865 0x8c18, 0xffffffff, 0x20202078,
866 0x8c1c, 0x0000ffff, 0x00001010,
867 0x28350, 0x00000f01, 0x00000000,
868 0x9508, 0x3700001f, 0x00000002,
869 0x960c, 0xffffffff, 0x54763210,
870 0x88c4, 0x001f3ae3, 0x000000c2,
871 0x88d4, 0x0000001f, 0x00000010,
872 0x8974, 0xffffffff, 0x00000000
875 static const u32 turks_golden_registers[] =
877 0x5eb4, 0xffffffff, 0x00000002,
878 0x5e78, 0x8f311ff1, 0x001000f0,
879 0x8c8, 0x00003000, 0x00001070,
880 0x8cc, 0x000fffff, 0x00040035,
881 0x3f90, 0xffff0000, 0xfff00000,
882 0x9148, 0xffff0000, 0xfff00000,
883 0x3f94, 0xffff0000, 0xfff00000,
884 0x914c, 0xffff0000, 0xfff00000,
885 0xc78, 0x00000080, 0x00000080,
886 0xbd4, 0x00073007, 0x00010002,
887 0xd02c, 0xbfffff1f, 0x08421000,
888 0xd0b8, 0x03773777, 0x02010002,
889 0x5bc0, 0x00200000, 0x50100000,
890 0x98f8, 0x33773777, 0x00010002,
891 0x98fc, 0xffffffff, 0x33221100,
892 0x7030, 0x31000311, 0x00000011,
893 0x2f48, 0x33773777, 0x00010002,
894 0x6b28, 0x00000010, 0x00000012,
895 0x7728, 0x00000010, 0x00000012,
896 0x10328, 0x00000010, 0x00000012,
897 0x10f28, 0x00000010, 0x00000012,
898 0x11b28, 0x00000010, 0x00000012,
899 0x12728, 0x00000010, 0x00000012,
900 0x240c, 0x000007ff, 0x00000380,
901 0x8a14, 0xf000001f, 0x00000007,
902 0x8b24, 0x3fff3fff, 0x00ff0fff,
903 0x8b10, 0x0000ff0f, 0x00000000,
904 0x28a4c, 0x07ffffff, 0x06000000,
905 0x10c, 0x00000001, 0x00010003,
906 0xa02c, 0xffffffff, 0x0000009b,
907 0x913c, 0x0000000f, 0x0100000a,
908 0x8d00, 0xffff7f7f, 0x100e4848,
909 0x8d04, 0x00ffffff, 0x00164745,
910 0x8c00, 0xfffc0003, 0xe4000003,
911 0x8c04, 0xf8ff00ff, 0x40600060,
912 0x8c08, 0x00ff00ff, 0x001c001c,
913 0x8cf0, 0x1fff1fff, 0x08e00410,
914 0x8c20, 0x0fff0fff, 0x00800080,
915 0x8c24, 0x0fff0fff, 0x00800080,
916 0x8c18, 0xffffffff, 0x20202078,
917 0x8c1c, 0x0000ffff, 0x00001010,
918 0x28350, 0x00000f01, 0x00000000,
919 0x9508, 0x3700001f, 0x00000002,
920 0x960c, 0xffffffff, 0x54763210,
921 0x88c4, 0x001f3ae3, 0x000000c2,
922 0x88d4, 0x0000001f, 0x00000010,
923 0x8974, 0xffffffff, 0x00000000
926 static const u32 caicos_golden_registers[] =
928 0x5eb4, 0xffffffff, 0x00000002,
929 0x5e78, 0x8f311ff1, 0x001000f0,
930 0x8c8, 0x00003420, 0x00001450,
931 0x8cc, 0x000fffff, 0x00040035,
932 0x3f90, 0xffff0000, 0xfffc0000,
933 0x9148, 0xffff0000, 0xfffc0000,
934 0x3f94, 0xffff0000, 0xfffc0000,
935 0x914c, 0xffff0000, 0xfffc0000,
936 0xc78, 0x00000080, 0x00000080,
937 0xbd4, 0x00073007, 0x00010001,
938 0xd02c, 0xbfffff1f, 0x08421000,
939 0xd0b8, 0x03773777, 0x02010001,
940 0x5bc0, 0x00200000, 0x50100000,
941 0x98f8, 0x33773777, 0x02010001,
942 0x98fc, 0xffffffff, 0x33221100,
943 0x7030, 0x31000311, 0x00000011,
944 0x2f48, 0x33773777, 0x02010001,
945 0x6b28, 0x00000010, 0x00000012,
946 0x7728, 0x00000010, 0x00000012,
947 0x10328, 0x00000010, 0x00000012,
948 0x10f28, 0x00000010, 0x00000012,
949 0x11b28, 0x00000010, 0x00000012,
950 0x12728, 0x00000010, 0x00000012,
951 0x240c, 0x000007ff, 0x00000380,
952 0x8a14, 0xf000001f, 0x00000001,
953 0x8b24, 0x3fff3fff, 0x00ff0fff,
954 0x8b10, 0x0000ff0f, 0x00000000,
955 0x28a4c, 0x07ffffff, 0x06000000,
956 0x10c, 0x00000001, 0x00010003,
957 0xa02c, 0xffffffff, 0x0000009b,
958 0x913c, 0x0000000f, 0x0100000a,
959 0x8d00, 0xffff7f7f, 0x100e4848,
960 0x8d04, 0x00ffffff, 0x00164745,
961 0x8c00, 0xfffc0003, 0xe4000003,
962 0x8c04, 0xf8ff00ff, 0x40600060,
963 0x8c08, 0x00ff00ff, 0x001c001c,
964 0x8cf0, 0x1fff1fff, 0x08e00410,
965 0x8c20, 0x0fff0fff, 0x00800080,
966 0x8c24, 0x0fff0fff, 0x00800080,
967 0x8c18, 0xffffffff, 0x20202078,
968 0x8c1c, 0x0000ffff, 0x00001010,
969 0x28350, 0x00000f01, 0x00000000,
970 0x9508, 0x3700001f, 0x00000002,
971 0x960c, 0xffffffff, 0x54763210,
972 0x88c4, 0x001f3ae3, 0x000000c2,
973 0x88d4, 0x0000001f, 0x00000010,
974 0x8974, 0xffffffff, 0x00000000
/*
 * Program the per-family "golden register" (preferred default) sequences.
 * Each chip family writes its own golden table, the shared secondary table
 * where applicable, and its mgcg (medium-grain clock gating) init table.
 * NOTE(review): the switch's case labels are missing from this extract —
 * confirm the family-to-table mapping against the full file.
 */
977 static void evergreen_init_golden_registers(struct radeon_device *rdev)
979 switch (rdev->family) {
/* cypress tables */
982 radeon_program_register_sequence(rdev,
983 evergreen_golden_registers,
984 (const u32)ARRAY_SIZE(evergreen_golden_registers));
985 radeon_program_register_sequence(rdev,
986 evergreen_golden_registers2,
987 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
988 radeon_program_register_sequence(rdev,
990 (const u32)ARRAY_SIZE(cypress_mgcg_init));
/* juniper tables */
993 radeon_program_register_sequence(rdev,
994 evergreen_golden_registers,
995 (const u32)ARRAY_SIZE(evergreen_golden_registers));
996 radeon_program_register_sequence(rdev,
997 evergreen_golden_registers2,
998 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
999 radeon_program_register_sequence(rdev,
1001 (const u32)ARRAY_SIZE(juniper_mgcg_init));
/* redwood tables */
1004 radeon_program_register_sequence(rdev,
1005 evergreen_golden_registers,
1006 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1007 radeon_program_register_sequence(rdev,
1008 evergreen_golden_registers2,
1009 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1010 radeon_program_register_sequence(rdev,
1012 (const u32)ARRAY_SIZE(redwood_mgcg_init));
/* cedar tables */
1015 radeon_program_register_sequence(rdev,
1016 cedar_golden_registers,
1017 (const u32)ARRAY_SIZE(cedar_golden_registers));
1018 radeon_program_register_sequence(rdev,
1019 evergreen_golden_registers2,
1020 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1021 radeon_program_register_sequence(rdev,
1023 (const u32)ARRAY_SIZE(cedar_mgcg_init));
/* wrestler table */
1026 radeon_program_register_sequence(rdev,
1027 wrestler_golden_registers,
1028 (const u32)ARRAY_SIZE(wrestler_golden_registers));
/* supersumo table */
1031 radeon_program_register_sequence(rdev,
1032 supersumo_golden_registers,
1033 (const u32)ARRAY_SIZE(supersumo_golden_registers));
/* sumo: supersumo table plus sumo-specific table */
1036 radeon_program_register_sequence(rdev,
1037 supersumo_golden_registers,
1038 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1039 radeon_program_register_sequence(rdev,
1040 sumo_golden_registers,
1041 (const u32)ARRAY_SIZE(sumo_golden_registers));
/* barts table */
1044 radeon_program_register_sequence(rdev,
1045 barts_golden_registers,
1046 (const u32)ARRAY_SIZE(barts_golden_registers));
/* turks table */
1049 radeon_program_register_sequence(rdev,
1050 turks_golden_registers,
1051 (const u32)ARRAY_SIZE(turks_golden_registers));
/* caicos table */
1054 radeon_program_register_sequence(rdev,
1055 caicos_golden_registers,
1056 (const u32)ARRAY_SIZE(caicos_golden_registers));
/**
1064  * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1066  * @rdev: radeon_device pointer
1067  * @reg: register offset in bytes
1068  * @val: register value
1070  * Returns 0 for success or -EINVAL for an invalid register
 */
/*
 * Whitelist of registers userspace may read through the info ioctl.
 * NOTE(review): the switch header, remaining case labels, read, and
 * return paths are missing from this extract.
 */
1073 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
1078 case GRBM_STATUS_SE0:
1079 case GRBM_STATUS_SE1:
1082 case DMA_STATUS_REG:
/*
 * Unpack the evergreen tiling parameters (bank width/height, macro tile
 * aspect, tile split) from the packed tiling_flags word, then translate the
 * raw 1/2/4/8 values into their ADDR_SURF_* register encodings.
 */
1091 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1092 unsigned *bankh, unsigned *mtaspect,
1093 unsigned *tile_split)
/* extract each raw field via its shift/mask */
1095 *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1096 *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1097 *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1098 *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
/* map raw bank width to its ADDR_SURF encoding */
1101 case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1102 case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1103 case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1104 case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
/* map raw bank height to its ADDR_SURF encoding */
1108 case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1109 case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1110 case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1111 case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
/* map raw macro tile aspect to its ADDR_SURF encoding */
1113 switch (*mtaspect) {
1115 case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1116 case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1117 case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1118 case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
/*
 * sumo_set_uvd_clock - program one UVD clock (VCLK or DCLK).
 *
 * Asks the atom tables for engine-PLL dividers for the requested clock,
 * programs the post divider into @cntl_reg, then polls @status_reg for
 * the DCLK_STATUS bit indicating the divider change completed.
 */
1122 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1123 u32 cntl_reg, u32 status_reg)
1126 struct atom_clock_dividers dividers;
1128 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
/* fix mojibake: "&dividers" had been corrupted into the HTML entity
 * for the division sign ("&divide" -> '\xf7') followed by "rs" */
1129 clock, false, &dividers);
1133 WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
/* poll up to 100 times for the status bit to latch */
1135 for (i = 0; i < 100; i++) {
1136 if (RREG32(status_reg) & DCLK_STATUS)
/*
 * Set both UVD clocks and record them (in MHz) in the CG_SCRATCH1 register:
 * vclk/100 in the low 16 bits, dclk/100 in the high 16 bits.
 * NOTE(review): the error-return checks after each sumo_set_uvd_clock()
 * call appear to be missing from this extract.
 */
1146 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1149 u32 cg_scratch = RREG32(CG_SCRATCH1);
1151 r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
/* stash vclk in MHz in the low half of the scratch value */
1154 cg_scratch &= 0xffff0000;
1155 cg_scratch |= vclk / 100; /* Mhz */
1157 r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
/* stash dclk in MHz in the high half of the scratch value */
1160 cg_scratch &= 0x0000ffff;
1161 cg_scratch |= (dclk / 100) << 16; /* Mhz */
1164 WREG32(CG_SCRATCH1, cg_scratch);
/*
 * Program the UVD PLL (UPLL) for the requested VCLK/DCLK: bypass the PLL,
 * compute feedback/post dividers, reprogram the PLL, then switch VCLK/DCLK
 * back onto the PLL outputs.  Passing vclk == 0 or dclk == 0 leaves the PLL
 * bypassed and asleep (power-down path).
 * NOTE(review): error-return checks and delays between steps appear to be
 * elided from this extract; the statement ORDER below is hardware-mandated.
 */
1169 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1171 /* start off with something large */
1172 unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
1175 /* bypass vclk and dclk with bclk */
1176 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1177 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
1178 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
1180 /* put PLL in bypass mode */
1181 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);
1183 if (!vclk || !dclk) {
1184 /* keep the Bypass mode, put PLL to sleep */
1185 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK)
1189 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
1190 16384, 0x03FFFFFF, 0, 128, 5,
1191 &fb_div, &vclk_div, &dclk_div);
1195 /* set VCO_MODE to 1 */
1196 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
1198 /* toggle UPLL_SLEEP to 1 then back to 0 */
1199 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
1200 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
1202 /* deassert UPLL_RESET */
1203 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1207 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1211 /* assert UPLL_RESET again */
1212 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
1214 /* disable spread spectrum. */
1215 WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
1217 /* set feedback divider */
1218 WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
1220 /* set ref divider to 0 */
1221 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
/* ISPARE9 spare bit selection depends on the feedback divider magnitude */
1223 if (fb_div < 307200)
1224 WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
1226 WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);
1228 /* set PDIV_A and PDIV_B */
1229 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1230 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
1231 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
1233 /* give the PLL some time to settle */
1236 /* deassert PLL_RESET */
1237 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1241 /* switch from bypass mode to normal mode */
1242 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
1244 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1248 /* switch VCLK and DCLK selection */
1249 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1250 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
1251 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
/*
 * Sanity-check the PCIe max read request size and force it to 512 bytes
 * if the BIOS/OS left an encoding this hardware cannot handle.
 */
1258 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1263 readrq = pcie_get_readrq(rdev->pdev);
/* convert the byte size (a power of two, min 128) to its MRRS encoding */
1264 v = ffs(readrq) - 8;
1265 /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1266 * to avoid hangs or performance issues
1268 if ((v == 0) || (v == 6) || (v == 7))
1269 pcie_set_readrq(rdev->pdev, 512);
/*
 * Program the FMT (output formatter) block of the crtc driving @encoder:
 * select truncation or dithering appropriate for the monitor's bpc.
 * NOTE(review): the bpc switch header and some branches are missing from
 * this extract; the visible branches appear to be the 6-bpc and 8-bpc cases.
 */
1272 void dce4_program_fmt(struct drm_encoder *encoder)
1274 struct drm_device *dev = encoder->dev;
1275 struct radeon_device *rdev = dev->dev_private;
1276 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1277 struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1278 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1281 enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
/* pull bpc and the dither preference from the connector, if any */
1284 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1285 bpc = radeon_get_monitor_bpc(connector);
1286 dither = radeon_connector->dither;
1289 /* LVDS/eDP FMT is set up by atom */
1290 if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1293 /* not needed for analog */
1294 if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1295 (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
/* dither down to the panel depth, or simply truncate */
1303 if (dither == RADEON_FMT_DITHER_ENABLE)
1304 /* XXX sort out optimal dither settings */
1305 tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1306 FMT_SPATIAL_DITHER_EN);
1308 tmp |= FMT_TRUNCATE_EN;
1311 if (dither == RADEON_FMT_DITHER_ENABLE)
1312 /* XXX sort out optimal dither settings */
1313 tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1314 FMT_RGB_RANDOM_ENABLE |
1315 FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1317 tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1325 WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
/* Return true if @crtc is currently inside its vertical blank interval,
 * per the V_BLANK bit of the CRTC status register. */
1328 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1330 if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
/* Return true if the crtc's position counter is advancing — sampled by
 * reading the status-position register twice and comparing (comparison
 * line is outside this extract). */
1336 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1340 pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1341 pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
/**
1350  * dce4_wait_for_vblank - vblank wait asic callback.
1352  * @rdev: radeon_device pointer
1353  * @crtc: crtc to wait for vblank on
1355  * Wait for vblank on the requested crtc (evergreen+).
 */
1357 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
/* ignore out-of-range crtc indices */
1361 if (crtc >= rdev->num_crtc)
/* nothing to wait on if the crtc is not enabled */
1364 if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1367 /* depending on when we hit vblank, we may be close to active; if so,
1368 * wait for another frame.
/* first wait out any vblank we are already inside of ... */
1370 while (dce4_is_in_vblank(rdev, crtc)) {
/* every 100 polls, bail out if the position counter stopped moving */
1371 if (i++ % 100 == 0) {
1372 if (!dce4_is_counter_moving(rdev, crtc))
/* ... then wait for the next vblank to start */
1377 while (!dce4_is_in_vblank(rdev, crtc)) {
1378 if (i++ % 100 == 0) {
1379 if (!dce4_is_counter_moving(rdev, crtc))
/**
1386  * evergreen_page_flip - pageflip callback.
1388  * @rdev: radeon_device pointer
1389  * @crtc_id: crtc to cleanup pageflip on
1390  * @crtc_base: new address of the crtc (GPU MC address)
1392  * Triggers the actual pageflip by updating the primary
1393  * surface base address (evergreen+).
 */
1395 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
1398 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1400 /* update the scanout addresses */
/* async flip latches on hblank instead of waiting for vblank */
1401 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
1402 async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
1403 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1404 upper_32_bits(crtc_base));
1405 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1407 /* post the write */
1408 RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
/**
1412  * evergreen_page_flip_pending - check if page flip is still pending
1414  * @rdev: radeon_device pointer
1415  * @crtc_id: crtc to check
1417  * Returns the current update pending status.
 */
1419 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1421 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1423 /* Return current update_pending status: */
1424 return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1425 EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1428 /* get temperature in millidegrees */
/*
 * Juniper uses a calibrated thermal sensor (TS0 reading corrected by a
 * signed 9-bit trim offset); other evergreen parts read ASIC_T from
 * CG_MULT_THERMAL_STATUS directly.
 */
1429 int evergreen_get_temp(struct radeon_device *rdev)
1432 int actual_temp = 0;
1434 if (rdev->family == CHIP_JUNIPER) {
1435 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1437 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
/* toffset is 9-bit two's complement: bit 0x100 set means negative */
1440 if (toffset & 0x100)
1441 actual_temp = temp / 2 - (0x200 - toffset);
1443 actual_temp = temp / 2 + toffset;
/* degrees -> millidegrees */
1445 actual_temp = actual_temp * 1000;
1448 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1453 else if (temp & 0x200)
1455 else if (temp & 0x100) {
/* sign-extend the 9-bit negative reading */
1456 actual_temp = temp & 0x1ff;
1457 actual_temp |= ~0x1ff;
1459 actual_temp = temp & 0xff;
/* reading is in half-degrees; convert to millidegrees */
1461 actual_temp = (actual_temp * 1000) / 2;
/* Read the sumo on-die temperature: the raw status value is offset by 49
 * (presumably degrees C — confirm against hw docs); returned in
 * millidegrees to match evergreen_get_temp(). */
1467 int sumo_get_temp(struct radeon_device *rdev)
1469 u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1470 int actual_temp = temp - 49;
1472 return actual_temp * 1000;
/**
1476  * sumo_pm_init_profile - Initialize power profiles callback.
1478  * @rdev: radeon_device pointer
1480  * Initialize the power states used in profile mode
1481  * (sumo, trinity, SI).
1482  * Used for profile mode only.
 */
1484 void sumo_pm_init_profile(struct radeon_device *rdev)
/* default profile: reuse the default power state for dpms on and off */
1489 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1490 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1491 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1492 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
/* low/mid profiles: battery state on mobile parts, performance otherwise */
1495 if (rdev->flags & RADEON_IS_MOBILITY)
1496 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1498 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1500 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1501 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1502 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1503 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1505 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1506 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1507 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1508 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1510 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1511 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1512 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1513 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1515 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1516 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1517 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1518 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
/* high profiles: always performance state, highest clock mode when on */
1521 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1522 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1523 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1524 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1525 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1526 rdev->pm.power_state[idx].num_clock_modes - 1;
1528 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1529 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1530 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1531 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1532 rdev->pm.power_state[idx].num_clock_modes - 1;
/**
1536  * btc_pm_init_profile - Initialize power profiles callback.
1538  * @rdev: radeon_device pointer
1540  * Initialize the power states used in profile mode
1542  * Used for profile mode only.
 */
1544 void btc_pm_init_profile(struct radeon_device *rdev)
/* default profile: default power state, clock mode 2 while displays are on */
1549 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1550 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1551 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1552 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1553 /* starting with BTC, there is one state that is used for both
1554 * MH and SH. Difference is that we always use the high clock index for
/* pick battery state on mobile parts, performance otherwise */
1557 if (rdev->flags & RADEON_IS_MOBILITY)
1558 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1560 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
/* single-head profiles: clock mode 0/1/2 for low/mid/high */
1562 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1563 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1564 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1565 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1567 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1568 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1569 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1570 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1572 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1573 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1574 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1575 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
/* multi-head profiles mirror the single-head settings */
1577 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1578 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1579 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1580 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1582 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1583 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1584 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1585 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1587 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1588 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1589 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1590 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
/**
1594  * evergreen_pm_misc - set additional pm hw parameters callback.
1596  * @rdev: radeon_device pointer
1598  * Set non-clock parameters associated with a power state
1599  * (voltage, etc.) (evergreen+).
 */
1601 void evergreen_pm_misc(struct radeon_device *rdev)
1603 int req_ps_idx = rdev->pm.requested_power_state_index;
1604 int req_cm_idx = rdev->pm.requested_clock_mode_index;
1605 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1606 struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1608 if (voltage->type == VOLTAGE_SW) {
1609 /* 0xff0x are flags rather than an actual voltage */
1610 if ((voltage->voltage & 0xff00) == 0xff00)
/* only touch the regulator when the requested vddc actually differs */
1612 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1613 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1614 rdev->pm.current_vddc = voltage->voltage;
1615 DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1618 /* starting with BTC, there is one state that is used for both
1619 * MH and SH. Difference is that we always use the high clock index for
/* for BTC+ MH profiles, take vddci from the high-MH clock mode instead */
1622 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1623 (rdev->family >= CHIP_BARTS) &&
1624 rdev->pm.active_crtc_count &&
1625 ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1626 (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1627 voltage = &rdev->pm.power_state[req_ps_idx].
1628 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1630 /* 0xff0x are flags rather than an actual voltage */
1631 if ((voltage->vddci & 0xff00) == 0xff00)
1633 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1634 radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1635 rdev->pm.current_vddci = voltage->vddci;
1636 DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
/**
1642  * evergreen_pm_prepare - pre-power state change callback.
1644  * @rdev: radeon_device pointer
1646  * Prepare for a power state change (evergreen+).
 */
1648 void evergreen_pm_prepare(struct radeon_device *rdev)
1650 struct drm_device *ddev = rdev->ddev;
1651 struct drm_crtc *crtc;
1652 struct radeon_crtc *radeon_crtc;
1655 /* disable any active CRTCs */
/* set DISP_READ_REQUEST_DISABLE on every enabled crtc so no display
 * memory requests are issued while clocks/voltages change */
1656 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1657 radeon_crtc = to_radeon_crtc(crtc);
1658 if (radeon_crtc->enabled) {
1659 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1660 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1661 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
/**
1667  * evergreen_pm_finish - post-power state change callback.
1669  * @rdev: radeon_device pointer
1671  * Clean up after a power state change (evergreen+).
 */
1673 void evergreen_pm_finish(struct radeon_device *rdev)
1675 struct drm_device *ddev = rdev->ddev;
1676 struct drm_crtc *crtc;
1677 struct radeon_crtc *radeon_crtc;
1680 /* enable any active CRTCs */
/* clear DISP_READ_REQUEST_DISABLE again — inverse of evergreen_pm_prepare() */
1681 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1682 radeon_crtc = to_radeon_crtc(crtc);
1683 if (radeon_crtc->enabled) {
1684 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1685 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1686 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
/**
1692  * evergreen_hpd_sense - hpd sense callback.
1694  * @rdev: radeon_device pointer
1695  * @hpd: hpd (hotplug detect) pin
1697  * Checks if a digital monitor is connected (evergreen+).
1698  * Returns true if connected, false if not connected.
 */
1700 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1702 bool connected = false;
/* each HPD pin has its own status register; test its SENSE bit
 * (the switch header and "connected = true" lines are outside this extract) */
1706 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1710 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1714 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1718 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1722 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1726 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
/**
1737  * evergreen_hpd_set_polarity - hpd set polarity callback.
1739  * @rdev: radeon_device pointer
1740  * @hpd: hpd (hotplug detect) pin
1742  * Set the polarity of the hpd pin (evergreen+).
 */
1744 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1745 enum radeon_hpd_id hpd)
1748 bool connected = evergreen_hpd_sense(rdev, hpd);
/* when connected, interrupt on disconnect (polarity cleared); when
 * disconnected, interrupt on connect (polarity set) — per pin below */
1752 tmp = RREG32(DC_HPD1_INT_CONTROL);
1754 tmp &= ~DC_HPDx_INT_POLARITY;
1756 tmp |= DC_HPDx_INT_POLARITY;
1757 WREG32(DC_HPD1_INT_CONTROL, tmp);
1760 tmp = RREG32(DC_HPD2_INT_CONTROL);
1762 tmp &= ~DC_HPDx_INT_POLARITY;
1764 tmp |= DC_HPDx_INT_POLARITY;
1765 WREG32(DC_HPD2_INT_CONTROL, tmp);
1768 tmp = RREG32(DC_HPD3_INT_CONTROL);
1770 tmp &= ~DC_HPDx_INT_POLARITY;
1772 tmp |= DC_HPDx_INT_POLARITY;
1773 WREG32(DC_HPD3_INT_CONTROL, tmp);
1776 tmp = RREG32(DC_HPD4_INT_CONTROL);
1778 tmp &= ~DC_HPDx_INT_POLARITY;
1780 tmp |= DC_HPDx_INT_POLARITY;
1781 WREG32(DC_HPD4_INT_CONTROL, tmp);
1784 tmp = RREG32(DC_HPD5_INT_CONTROL);
1786 tmp &= ~DC_HPDx_INT_POLARITY;
1788 tmp |= DC_HPDx_INT_POLARITY;
1789 WREG32(DC_HPD5_INT_CONTROL, tmp);
1792 tmp = RREG32(DC_HPD6_INT_CONTROL);
1794 tmp &= ~DC_HPDx_INT_POLARITY;
1796 tmp |= DC_HPDx_INT_POLARITY;
1797 WREG32(DC_HPD6_INT_CONTROL, tmp);
/**
1805  * evergreen_hpd_init - hpd setup callback.
1807  * @rdev: radeon_device pointer
1809  * Setup the hpd pins used by the card (evergreen+).
1810  * Enable the pin, set the polarity, and enable the hpd interrupts.
 */
1812 void evergreen_hpd_init(struct radeon_device *rdev)
1814 struct drm_device *dev = rdev->ddev;
1815 struct drm_connector *connector;
1816 unsigned enabled = 0;
/* common pin config: enable, connection timer 0x9c4, RX int timer 0xfa */
1817 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1818 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1820 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1821 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1823 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1824 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1825 /* don't try to enable hpd on eDP or LVDS avoid breaking the
1826 * aux dp channel on imac and help (but not completely fix)
1827 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1828 * also avoid interrupt storms during dpms.
/* program the control register matching this connector's hpd pin */
1832 switch (radeon_connector->hpd.hpd) {
1834 WREG32(DC_HPD1_CONTROL, tmp);
1837 WREG32(DC_HPD2_CONTROL, tmp);
1840 WREG32(DC_HPD3_CONTROL, tmp);
1843 WREG32(DC_HPD4_CONTROL, tmp);
1846 WREG32(DC_HPD5_CONTROL, tmp);
1849 WREG32(DC_HPD6_CONTROL, tmp);
1854 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
/* collect a bitmask of the pins we enabled */
1855 if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1856 enabled |= 1 << radeon_connector->hpd.hpd;
1858 radeon_irq_kms_enable_hpd(rdev, enabled);
/**
1862  * evergreen_hpd_fini - hpd tear down callback.
1864  * @rdev: radeon_device pointer
1866  * Tear down the hpd pins used by the card (evergreen+).
1867  * Disable the hpd interrupts.
 */
1869 void evergreen_hpd_fini(struct radeon_device *rdev)
1871 struct drm_device *dev = rdev->ddev;
1872 struct drm_connector *connector;
1873 unsigned disabled = 0;
1875 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1876 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
/* write 0 to each pin's control register to disable it */
1877 switch (radeon_connector->hpd.hpd) {
1879 WREG32(DC_HPD1_CONTROL, 0);
1882 WREG32(DC_HPD2_CONTROL, 0);
1885 WREG32(DC_HPD3_CONTROL, 0);
1888 WREG32(DC_HPD4_CONTROL, 0);
1891 WREG32(DC_HPD5_CONTROL, 0);
1894 WREG32(DC_HPD6_CONTROL, 0);
/* collect a bitmask of the pins we disabled */
1899 if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1900 disabled |= 1 << radeon_connector->hpd.hpd;
1902 radeon_irq_kms_disable_hpd(rdev, disabled);
1905 /* watermark setup */
/*
 * Pick a line-buffer split for @radeon_crtc (whole lb if the paired crtc is
 * idle, half otherwise), program DC_LB_MEMORY_SPLIT and the DMIF buffer
 * allocation, and return the line-buffer size granted to this pipe.
 */
1907 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1908 struct radeon_crtc *radeon_crtc,
1909 struct drm_display_mode *mode,
1910 struct drm_display_mode *other_mode)
1912 u32 tmp, buffer_alloc, i;
1913 u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1916 * There are 3 line buffers, each one shared by 2 display controllers.
1917 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1918 * the display controllers. The partitioning is done via one of four
1919 * preset allocations specified in bits 2:0:
1920 * first display controller
1921 *  0 - first half of lb (3840 * 2)
1922 *  1 - first 3/4 of lb (5760 * 2)
1923 *  2 - whole lb (7680 * 2), other crtc must be disabled
1924 *  3 - first 1/4 of lb (1920 * 2)
1925 * second display controller
1926 *  4 - second half of lb (3840 * 2)
1927 *  5 - second 3/4 of lb (5760 * 2)
1928 *  6 - whole lb (7680 * 2), other crtc must be disabled
1929 *  7 - last 1/4 of lb (1920 * 2)
1931 /* this can get tricky if we have two large displays on a paired group
1932 * of crtcs. Ideally for multiple large displays we'd assign them to
1933 * non-linked crtcs for maximum line buffer allocation.
1935 if (radeon_crtc->base.enabled && mode) {
1940 tmp = 2; /* whole */
1948 /* second controller of the pair uses second half of the lb */
1949 if (radeon_crtc->crtc_id % 2)
1951 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
/* DCE4.1/DCE5 also need the DMIF buffer allocation programmed, and we
 * must wait for the allocation to complete */
1953 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1954 WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1955 DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1956 for (i = 0; i < rdev->usec_timeout; i++) {
1957 if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1958 DMIF_BUFFERS_ALLOCATED_COMPLETED)
/* translate the chosen split into the lb size for this pipe;
 * DCE5 buffers are larger (return statements elided from this extract) */
1964 if (radeon_crtc->base.enabled && mode) {
1969 if (ASIC_IS_DCE5(rdev))
1975 if (ASIC_IS_DCE5(rdev))
1981 if (ASIC_IS_DCE5(rdev))
1987 if (ASIC_IS_DCE5(rdev))
1994 /* controller not enabled, so no lb used */
/* Decode the DRAM channel count from the NOOFCHAN field of MC_SHARED_CHMAP
 * (case labels and returns are outside this extract). */
1998 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2000 u32 tmp = RREG32(MC_SHARED_CHMAP);
2002 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
/* Inputs to the display watermark calculations below: one instance describes
 * a single display head plus the memory/engine clocks in effect. */
2015 struct evergreen_wm_params {
2016 u32 dram_channels; /* number of dram channels */
2017 u32 yclk; /* bandwidth per dram data pin in kHz */
2018 u32 sclk; /* engine clock in kHz */
2019 u32 disp_clk; /* display clock in kHz */
2020 u32 src_width; /* viewport width */
2021 u32 active_time; /* active display time in ns */
2022 u32 blank_time; /* blank time in ns */
2023 bool interlaced; /* mode is interlaced */
2024 fixed20_12 vsc; /* vertical scale ratio */
2025 u32 num_heads; /* number of active crtcs */
2026 u32 bytes_per_pixel; /* bytes per pixel display + overlay */
2027 u32 lb_size; /* line buffer allocated to pipe */
2028 u32 vtaps; /* vertical scaler taps */
/* Raw DRAM bandwidth in MB/s: (yclk MHz) * (channels * 4 bytes) * 0.7
 * efficiency, computed in 20.12 fixed point. */
2031 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2033 /* Calculate DRAM Bandwidth and the part allocated to display. */
2034 fixed20_12 dram_efficiency; /* 0.7 */
2035 fixed20_12 yclk, dram_channels, bandwidth;
/* kHz -> MHz */
2038 a.full = dfixed_const(1000);
2039 yclk.full = dfixed_const(wm->yclk);
2040 yclk.full = dfixed_div(yclk, a);
2041 dram_channels.full = dfixed_const(wm->dram_channels * 4);
2042 a.full = dfixed_const(10);
2043 dram_efficiency.full = dfixed_const(7);
2044 dram_efficiency.full = dfixed_div(dram_efficiency, a);
2045 bandwidth.full = dfixed_mul(dram_channels, yclk);
2046 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2048 return dfixed_trunc(bandwidth);
/* DRAM bandwidth budget for the display block: same computation as
 * evergreen_dram_bandwidth() but with the worst-case 0.3 display share
 * instead of the 0.7 efficiency factor. */
2051 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2053 /* Calculate DRAM Bandwidth and the part allocated to display. */
2054 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2055 fixed20_12 yclk, dram_channels, bandwidth;
/* kHz -> MHz */
2058 a.full = dfixed_const(1000);
2059 yclk.full = dfixed_const(wm->yclk);
2060 yclk.full = dfixed_div(yclk, a);
2061 dram_channels.full = dfixed_const(wm->dram_channels * 4);
2062 a.full = dfixed_const(10);
2063 disp_dram_allocation.full = dfixed_const(3); /* XXX worst-case value 0.3 */
2064 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2065 bandwidth.full = dfixed_mul(dram_channels, yclk);
2066 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2068 return dfixed_trunc(bandwidth);
/* Display data return bandwidth: (sclk MHz) * 32 bytes * 0.8 efficiency,
 * in 20.12 fixed point. */
2071 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2073 /* Calculate the display Data return Bandwidth */
2074 fixed20_12 return_efficiency; /* 0.8 */
2075 fixed20_12 sclk, bandwidth;
/* kHz -> MHz */
2078 a.full = dfixed_const(1000);
2079 sclk.full = dfixed_const(wm->sclk);
2080 sclk.full = dfixed_div(sclk, a);
2081 a.full = dfixed_const(10);
2082 return_efficiency.full = dfixed_const(8);
2083 return_efficiency.full = dfixed_div(return_efficiency, a);
2084 a.full = dfixed_const(32);
2085 bandwidth.full = dfixed_mul(a, sclk);
2086 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2088 return dfixed_trunc(bandwidth);
/* DMIF request bandwidth: (disp_clk MHz) * 32 bytes * 0.8 efficiency,
 * in 20.12 fixed point — mirrors evergreen_data_return_bandwidth() but
 * keyed to the display clock. */
2091 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2093 /* Calculate the DMIF Request Bandwidth */
2094 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2095 fixed20_12 disp_clk, bandwidth;
/* kHz -> MHz */
2098 a.full = dfixed_const(1000);
2099 disp_clk.full = dfixed_const(wm->disp_clk);
2100 disp_clk.full = dfixed_div(disp_clk, a);
2101 a.full = dfixed_const(10);
2102 disp_clk_request_efficiency.full = dfixed_const(8);
2103 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2104 a.full = dfixed_const(32);
2105 bandwidth.full = dfixed_mul(a, disp_clk);
2106 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2108 return dfixed_trunc(bandwidth);
/* Peak bandwidth available to display: the minimum of the three limits
 * (DRAM, data return, DMIF request). */
2111 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2113 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2114 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2115 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2116 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2118 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
/* Average bandwidth the mode consumes:
 * src_width * bytes_per_pixel * vsc / line_time, in 20.12 fixed point. */
2121 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2123 /* Calculate the display mode Average Bandwidth
2124 * DisplayMode should contain the source and destination dimensions,
2128 fixed20_12 line_time;
2129 fixed20_12 src_width;
2130 fixed20_12 bandwidth;
/* line time: active + blank, ns -> us */
2133 a.full = dfixed_const(1000);
2134 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2135 line_time.full = dfixed_div(line_time, a);
2136 bpp.full = dfixed_const(wm->bytes_per_pixel);
2137 src_width.full = dfixed_const(wm->src_width);
2138 bandwidth.full = dfixed_mul(src_width, bpp);
/* scale by the vertical scale ratio */
2139 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2140 bandwidth.full = dfixed_div(bandwidth, line_time);
2142 return dfixed_trunc(bandwidth);
/* Compute the latency watermark (ns): the worst-case delay before data for
 * this head arrives, padded by any line-buffer fill shortfall. */
2145 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2147 /* First calculate the latency in ns */
2148 u32 mc_latency = 2000; /* 2000 ns. */
2149 u32 available_bandwidth = evergreen_available_bandwidth(wm);
/* time to return one worst-case 512-byte chunk / 128-byte cursor pair */
2150 u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2151 u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2152 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2153 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2154 (wm->num_heads * cursor_line_pair_return_time);
2155 u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2156 u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
/* no active heads: nothing to wait on */
2159 if (wm->num_heads == 0)
/* downscaling (vsc > 2, or > 1 with >= 3 vtaps, or interlaced with
 * vsc >= 2) needs 4 source lines per destination line, else 2 */
2162 a.full = dfixed_const(2);
2163 b.full = dfixed_const(1);
2164 if ((wm->vsc.full > a.full) ||
2165 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2167 ((wm->vsc.full >= a.full) && wm->interlaced))
2168 max_src_lines_per_dst_line = 4;
2170 max_src_lines_per_dst_line = 2;
/* line-buffer fill rate: min of our bandwidth share and what the
 * display clock can consume */
2172 a.full = dfixed_const(available_bandwidth);
2173 b.full = dfixed_const(wm->num_heads);
2174 a.full = dfixed_div(a, b);
2176 b.full = dfixed_const(1000);
2177 c.full = dfixed_const(wm->disp_clk);
2178 b.full = dfixed_div(c, b);
2179 c.full = dfixed_const(wm->bytes_per_pixel);
2180 b.full = dfixed_mul(b, c);
2182 lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
/* time to fill the needed source lines at that rate */
2184 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2185 b.full = dfixed_const(1000);
2186 c.full = dfixed_const(lb_fill_bw);
2187 b.full = dfixed_div(c, b);
2188 a.full = dfixed_div(a, b);
2189 line_fill_time = dfixed_trunc(a);
/* pad the latency by any fill time exceeding the active period */
2191 if (line_fill_time < wm->active_time)
2194 return latency + (line_fill_time - wm->active_time);
/* True when this head's average bandwidth fits within its per-head share of
 * the display DRAM budget (return lines are outside this extract). */
2198 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2200 if (evergreen_average_bandwidth(wm) <=
2201 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
/* Check that the average bandwidth fits within this head's share of the
 * total available bandwidth.
 * NOTE(review): the return statements are elided from this view;
 * presumably true when within budget, false otherwise — confirm upstream.
 */
2207 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2209 if (evergreen_average_bandwidth(wm) <=
2210 (evergreen_available_bandwidth(wm) / wm->num_heads))
/* Determine whether the line buffer can hide the latency computed by
 * evergreen_latency_watermark(): the number of latency-tolerant lines
 * times the line time (plus blank time) must cover the watermark.
 * NOTE(review): braces/else branches and the return statements are
 * elided from this view.
 */
2216 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2218 u32 lb_partitions = wm->lb_size / wm->src_width;
2219 u32 line_time = wm->active_time + wm->blank_time;
2220 u32 latency_tolerant_lines;
/* downscaling (vsc > 1) or too few lb partitions tolerate only 1 line */
2224 a.full = dfixed_const(1);
2225 if (wm->vsc.full > a.full)
2226 latency_tolerant_lines = 1;
2228 if (lb_partitions <= (wm->vtaps + 1))
2229 latency_tolerant_lines = 1;
2231 latency_tolerant_lines = 2;
2234 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2236 if (evergreen_latency_watermark(wm) <= latency_hiding)
/* Program the display watermarks and priority marks for one crtc.
 * Builds high- and low-clock watermark parameter sets, computes latency
 * watermarks, optionally forces display priority to high, converts the
 * watermarks into priority marks, and writes them to the PIPE0_* and
 * PRIORITY_*_CNT registers. Also caches line_time/wm_high/wm_low on the
 * crtc for DPM use.
 * @rdev:        radeon_device pointer
 * @radeon_crtc: crtc to program
 * @lb_size:     line buffer allocation for this crtc
 * @num_heads:   number of enabled display heads
 * NOTE(review): assignments inside several if/else branches (e.g. the
 * DPM mclk/sclk targets) are elided from this view.
 */
2242 static void evergreen_program_watermarks(struct radeon_device *rdev,
2243 struct radeon_crtc *radeon_crtc,
2244 u32 lb_size, u32 num_heads)
2246 struct drm_display_mode *mode = &radeon_crtc->base.mode;
2247 struct evergreen_wm_params wm_low, wm_high;
2251 u32 latency_watermark_a = 0, latency_watermark_b = 0;
2252 u32 priority_a_mark = 0, priority_b_mark = 0;
2253 u32 priority_a_cnt = PRIORITY_OFF;
2254 u32 priority_b_cnt = PRIORITY_OFF;
2255 u32 pipe_offset = radeon_crtc->crtc_id * 16;
2256 u32 tmp, arb_control3;
2259 if (radeon_crtc->base.enabled && num_heads && mode) {
2260 pixel_period = 1000000 / (u32)mode->clock;
2261 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2264 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2266 /* watermark for high clocks */
2267 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
/* DPM enabled: use the DPM-reported clocks (non-low state) */
2269 radeon_dpm_get_mclk(rdev, false) * 10;
2271 radeon_dpm_get_sclk(rdev, false) * 10;
/* otherwise fall back to the current clocks */
2273 wm_high.yclk = rdev->pm.current_mclk * 10;
2274 wm_high.sclk = rdev->pm.current_sclk * 10;
2277 wm_high.disp_clk = mode->clock;
2278 wm_high.src_width = mode->crtc_hdisplay;
2279 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2280 wm_high.blank_time = line_time - wm_high.active_time;
2281 wm_high.interlaced = false;
2282 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2283 wm_high.interlaced = true;
2284 wm_high.vsc = radeon_crtc->vsc;
2286 if (radeon_crtc->rmx_type != RMX_OFF)
2288 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2289 wm_high.lb_size = lb_size;
2290 wm_high.dram_channels = dram_channels;
2291 wm_high.num_heads = num_heads;
2293 /* watermark for low clocks */
2294 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
/* DPM enabled: use the DPM-reported low-state clocks */
2296 radeon_dpm_get_mclk(rdev, true) * 10;
2298 radeon_dpm_get_sclk(rdev, true) * 10;
2300 wm_low.yclk = rdev->pm.current_mclk * 10;
2301 wm_low.sclk = rdev->pm.current_sclk * 10;
2304 wm_low.disp_clk = mode->clock;
2305 wm_low.src_width = mode->crtc_hdisplay;
2306 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2307 wm_low.blank_time = line_time - wm_low.active_time;
2308 wm_low.interlaced = false;
2309 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2310 wm_low.interlaced = true;
2311 wm_low.vsc = radeon_crtc->vsc;
2313 if (radeon_crtc->rmx_type != RMX_OFF)
2315 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2316 wm_low.lb_size = lb_size;
2317 wm_low.dram_channels = dram_channels;
2318 wm_low.num_heads = num_heads;
2320 /* set for high clocks */
2321 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2322 /* set for low clocks */
2323 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2325 /* possibly force display priority to high */
2326 /* should really do this at mode validation time... */
2327 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2328 !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2329 !evergreen_check_latency_hiding(&wm_high) ||
2330 (rdev->disp_priority == 2)) {
2331 DRM_DEBUG_KMS("force priority a to high\n");
2332 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2334 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2335 !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2336 !evergreen_check_latency_hiding(&wm_low) ||
2337 (rdev->disp_priority == 2)) {
2338 DRM_DEBUG_KMS("force priority b to high\n");
2339 priority_b_cnt |= PRIORITY_ALWAYS_ON;
/* priority mark A = watermark_a * (clock/1000) * hsc / 1000 / 16 */
2342 a.full = dfixed_const(1000);
2343 b.full = dfixed_const(mode->clock);
2344 b.full = dfixed_div(b, a);
2345 c.full = dfixed_const(latency_watermark_a);
2346 c.full = dfixed_mul(c, b);
2347 c.full = dfixed_mul(c, radeon_crtc->hsc);
2348 c.full = dfixed_div(c, a);
2349 a.full = dfixed_const(16);
2350 c.full = dfixed_div(c, a);
2351 priority_a_mark = dfixed_trunc(c);
2352 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
/* priority mark B: same computation with watermark_b */
2354 a.full = dfixed_const(1000);
2355 b.full = dfixed_const(mode->clock);
2356 b.full = dfixed_div(b, a);
2357 c.full = dfixed_const(latency_watermark_b);
2358 c.full = dfixed_mul(c, b);
2359 c.full = dfixed_mul(c, radeon_crtc->hsc);
2360 c.full = dfixed_div(c, a);
2361 a.full = dfixed_const(16);
2362 c.full = dfixed_div(c, a);
2363 priority_b_mark = dfixed_trunc(c);
2364 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2366 /* Save number of lines the linebuffer leads before the scanout */
2367 radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
/* select watermark set 1, program watermark A, then set 2 for B */
2371 arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2373 tmp &= ~LATENCY_WATERMARK_MASK(3);
2374 tmp |= LATENCY_WATERMARK_MASK(1);
2375 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2376 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2377 (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2378 LATENCY_HIGH_WATERMARK(line_time)));
2380 tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2381 tmp &= ~LATENCY_WATERMARK_MASK(3);
2382 tmp |= LATENCY_WATERMARK_MASK(2);
2383 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2384 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2385 (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2386 LATENCY_HIGH_WATERMARK(line_time)));
2387 /* restore original selection */
2388 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2390 /* write the priority marks */
2391 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2392 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2394 /* save values for DPM */
2395 radeon_crtc->line_time = line_time;
2396 radeon_crtc->wm_high = latency_watermark_a;
2397 radeon_crtc->wm_low = latency_watermark_b;
2401 * evergreen_bandwidth_update - update display watermarks callback.
2403 * @rdev: radeon_device pointer
2405 * Update the display watermarks based on the requested mode(s)
/* Recompute display watermarks for all crtcs: count the enabled heads,
 * then for each crtc pair split the shared line buffer and program the
 * watermarks for both crtcs.
 * NOTE(review): the num_heads increment inside the first loop and the
 * early return are elided from this view.
 */
2408 void evergreen_bandwidth_update(struct radeon_device *rdev)
2410 struct drm_display_mode *mode0 = NULL;
2411 struct drm_display_mode *mode1 = NULL;
2412 u32 num_heads = 0, lb_size;
2415 if (!rdev->mode_info.mode_config_initialized)
2418 radeon_update_display_priority(rdev);
/* count enabled crtcs */
2420 for (i = 0; i < rdev->num_crtc; i++) {
2421 if (rdev->mode_info.crtcs[i]->base.enabled)
/* crtcs share line buffers in pairs (i, i+1) */
2424 for (i = 0; i < rdev->num_crtc; i += 2) {
2425 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2426 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2427 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2428 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2429 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2430 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2435 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2437 * @rdev: radeon_device pointer
2439 * Wait for the MC (memory controller) to be idle.
2441 * Returns 0 if the MC is idle, -1 if not.
/* Poll SRBM_STATUS (busy bits masked with 0x1F00) until the memory
 * controller goes idle or usec_timeout expires.
 * NOTE(review): the idle check, udelay and return statements are elided
 * from this view.
 */
2443 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2448 for (i = 0; i < rdev->usec_timeout; i++) {
2449 /* read MC_STATUS */
2450 tmp = RREG32(SRBM_STATUS) & 0x1F00;
/* Flush the PCIE GART TLB: flush the HDP cache, request a VM context 0
 * flush, then poll VM_CONTEXT0_REQUEST_RESPONSE until the response
 * arrives or usec_timeout expires.
 */
2461 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
/* flush HDP memory coherency first */
2466 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2468 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2469 for (i = 0; i < rdev->usec_timeout; i++) {
2470 /* read MC_STATUS */
2471 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2472 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
/* reached only when the flush never completed within the timeout */
2474 printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
/* Enable the PCIE GART: pin the GART table in VRAM, program the VM L2
 * cache and L1 TLBs, set up VM context 0 over the GTT aperture, and
 * flush the TLB. Returns 0 on success, negative errno on failure
 * (error paths are elided from this view).
 */
2484 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2489 if (rdev->gart.robj == NULL) {
2490 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2493 r = radeon_gart_table_vram_pin(rdev);
2496 /* Setup L2 cache */
2497 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2498 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2499 EFFECTIVE_L2_QUEUE_SIZE(7));
2500 WREG32(VM_L2_CNTL2, 0);
2501 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2502 /* Setup TLB control */
2503 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2504 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2505 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2506 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
/* IGPs use the FUS_* register variants */
2507 if (rdev->flags & RADEON_IS_IGP) {
2508 WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2509 WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2510 WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2512 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2513 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2514 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
/* these families have a fourth MD L1 TLB */
2515 if ((rdev->family == CHIP_JUNIPER) ||
2516 (rdev->family == CHIP_CYPRESS) ||
2517 (rdev->family == CHIP_HEMLOCK) ||
2518 (rdev->family == CHIP_BARTS))
2519 WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2521 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2522 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2523 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2524 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
/* map VM context 0 over the GTT range, backed by the GART table */
2525 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2526 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2527 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2528 WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2529 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
/* faults land on the dummy page */
2530 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2531 (u32)(rdev->dummy_page.addr >> 12));
2532 WREG32(VM_CONTEXT1_CNTL, 0);
2534 evergreen_pcie_gart_tlb_flush(rdev);
2535 DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2536 (unsigned)(rdev->mc.gtt_size >> 20),
2537 (unsigned long long)rdev->gart.table_addr);
2538 rdev->gart.ready = true;
/* Disable the PCIE GART: turn off both VM contexts, reprogram the L2
 * cache and L1 TLBs with the TLB disabled, and unpin the GART table.
 */
2542 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2546 /* Disable all tables */
2547 WREG32(VM_CONTEXT0_CNTL, 0);
2548 WREG32(VM_CONTEXT1_CNTL, 0);
2550 /* Setup L2 cache */
2551 WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2552 EFFECTIVE_L2_QUEUE_SIZE(7));
2553 WREG32(VM_L2_CNTL2, 0);
2554 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2555 /* Setup TLB control */
/* note: ENABLE_L1_TLB intentionally NOT set here */
2556 tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2557 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2558 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2559 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2560 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2561 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2562 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2563 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2564 radeon_gart_table_vram_unpin(rdev);
/* Tear down the PCIE GART: disable it, free the VRAM-resident table,
 * and release the common GART state.
 */
2567 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2569 evergreen_pcie_gart_disable(rdev);
2570 radeon_gart_table_vram_free(rdev);
2571 radeon_gart_fini(rdev);
/* Configure the VM hardware for AGP operation: same L2/L1 TLB setup as
 * the PCIE GART path, but with both VM contexts left disabled so all
 * accesses pass through untranslated.
 */
2575 static void evergreen_agp_enable(struct radeon_device *rdev)
2579 /* Setup L2 cache */
2580 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2581 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2582 EFFECTIVE_L2_QUEUE_SIZE(7));
2583 WREG32(VM_L2_CNTL2, 0);
2584 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2585 /* Setup TLB control */
2586 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2587 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2588 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2589 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2590 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2591 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2592 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2593 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2594 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2595 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2596 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
/* no page table translation for AGP */
2597 WREG32(VM_CONTEXT0_CNTL, 0);
2598 WREG32(VM_CONTEXT1_CNTL, 0);
/* Per-instance register offsets of the six DIG encoder blocks. */
2601 static const unsigned ni_dig_offsets[] =
2603 NI_DIG0_REGISTER_OFFSET,
2604 NI_DIG1_REGISTER_OFFSET,
2605 NI_DIG2_REGISTER_OFFSET,
2606 NI_DIG3_REGISTER_OFFSET,
2607 NI_DIG4_REGISTER_OFFSET,
2608 NI_DIG5_REGISTER_OFFSET
/* Per-instance UNIPHY TX control register offsets, indexed like
 * ni_dig_offsets. */
2611 static const unsigned ni_tx_offsets[] =
2613 NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
2614 NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
2615 NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
2616 NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
2617 NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
2618 NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
/* Per-instance DP block register offsets, indexed by dig_fe. */
2621 static const unsigned evergreen_dp_offsets[] =
2623 EVERGREEN_DP0_REGISTER_OFFSET,
2624 EVERGREEN_DP1_REGISTER_OFFSET,
2625 EVERGREEN_DP2_REGISTER_OFFSET,
2626 EVERGREEN_DP3_REGISTER_OFFSET,
2627 EVERGREEN_DP4_REGISTER_OFFSET,
2628 EVERGREEN_DP5_REGISTER_OFFSET
2633 * Assumption is that EVERGREEN_CRTC_MASTER_EN is enabled for the requested crtc.
2634 * We go from crtc to connector, which is not reliable since the lookup
2635 * should go in the opposite direction. If the crtc is enabled then
2636 * find the dig_fe which selects this crtc and ensure that it is enabled.
2637 * If such a dig_fe is found then find the dig_be which selects the found
2638 * dig_fe and ensure that it is enabled and in DP_SST mode.
2639 * If UNIPHY_PLL_CONTROL1 is enabled then we should disconnect the timing
2640 * from the dp symbol clocks.
/* Determine whether @crtc_id currently drives an enabled DP SST stream.
 * Scans the DIG front ends for one sourcing this crtc, then scans the
 * DIG back ends for one selecting that front end in DP SST mode with
 * its symbol clock and UNIPHY PLL running. On success *ret_dig_fe is
 * set to the front-end index.
 * NOTE(review): the found_crtc/is_enabled assignments, loop breaks and
 * return statement are elided from this view.
 */
2642 static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
2643 unsigned crtc_id, unsigned *ret_dig_fe)
2649 unsigned uniphy_pll;
2650 unsigned digs_fe_selected;
2651 unsigned dig_be_mode;
2652 unsigned dig_fe_mask;
2653 bool is_enabled = false;
2654 bool found_crtc = false;
2656 /* loop through all running dig_fe to find selected crtc */
2657 for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2658 dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
2659 if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
2660 crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
2661 /* found running pipe */
/* remember which front end feeds this crtc */
2663 dig_fe_mask = 1 << i;
2670 /* loop through all running dig_be to find selected dig_fe */
2671 for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2672 dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
2673 /* if dig_fe_selected by dig_be? */
2674 digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
2675 dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
2676 if (dig_fe_mask & digs_fe_selected &&
2677 /* if dig_be in sst mode? */
2678 dig_be_mode == NI_DIG_BE_DPSST) {
2679 dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
2681 uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
2683 /* dig_be enable and tx is running */
2684 if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
2685 dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
2686 uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
2688 *ret_dig_fe = dig_fe;
2699 * Blank the dig output when in dp sst mode;
2700 * the dig ignores the crtc timing.
/* Blank the DP output on the given front end: disable the video stream,
 * wait (up to 32 polls) for the stream status to clear, then reset the
 * steer FIFO. Bails out early on an invalid dig_fe index or when the
 * stream is not enabled.
 */
2702 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2705 unsigned stream_ctrl;
2707 unsigned counter = 0;
/* dig_fe indexes evergreen_dp_offsets; reject out-of-range values */
2709 if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2710 DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2714 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2715 evergreen_dp_offsets[dig_fe]);
2716 if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2717 DRM_ERROR("dig %d , should be enable\n", dig_fe);
/* disable the video stream */
2721 stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2722 WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2723 evergreen_dp_offsets[dig_fe], stream_ctrl);
/* poll until the hardware reports the stream stopped */
2725 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2726 evergreen_dp_offsets[dig_fe]);
2727 while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2730 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2731 evergreen_dp_offsets[dig_fe]);
2734 DRM_ERROR("counter exceeds %d\n", counter);
/* flush stale data out of the steer FIFO */
2736 fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2737 fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2738 WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
/* Quiesce the display side before reprogramming the memory controller:
 * disable VGA rendering, blank every enabled crtc (waiting a full frame
 * for it to take effect), blank any DP SST stream, blackout the MC, and
 * lock the double-buffered surface registers. State needed to undo all
 * of this is recorded in @save for evergreen_mc_resume().
 * NOTE(review): some closing braces/else branches are elided from this
 * view.
 */
2742 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2744 u32 crtc_enabled, tmp, frame_count, blackout;
2748 bzero(save, sizeof(*save)); /* avoid gcc warning */
2749 if (!ASIC_IS_NODCE(rdev)) {
2750 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2751 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2753 /* disable VGA render */
2754 WREG32(VGA_RENDER_CONTROL, 0);
2756 /* blank the display controllers */
2757 for (i = 0; i < rdev->num_crtc; i++) {
2758 crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2760 save->crtc_enabled[i] = true;
2761 if (ASIC_IS_DCE6(rdev)) {
/* DCE6+: blank via CRTC_BLANK_CONTROL */
2762 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2763 if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2764 radeon_wait_for_vblank(rdev, i);
2765 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2766 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2767 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2768 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
/* pre-DCE6: stop display read requests instead */
2771 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2772 if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2773 radeon_wait_for_vblank(rdev, i);
2774 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2775 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2776 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2777 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2780 /* wait for the next frame */
2781 frame_count = radeon_get_vblank_counter(rdev, i);
2782 for (j = 0; j < rdev->usec_timeout; j++) {
2783 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2787 /*we should disable dig if it drives dp sst*/
2788 /*but we are in radeon_device_init and the topology is unknown*/
2789 /*and it is available after radeon_modeset_init*/
2790 /*the following method radeon_atom_encoder_dpms_dig*/
2791 /*does the job if we initialize it properly*/
2792 /*for now we do it this manually*/
2794 if (ASIC_IS_DCE5(rdev) &&
2795 evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
2796 evergreen_blank_dp_output(rdev, dig_fe);
2797 /*we could remove 6 lines below*/
2798 /* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2799 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2800 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2801 tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2802 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2803 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2804 save->crtc_enabled[i] = false;
2807 save->crtc_enabled[i] = false;
2811 radeon_mc_wait_for_idle(rdev);
/* blackout the MC if it is not already blacked out */
2813 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2814 if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2815 /* Block CPU access */
2816 WREG32(BIF_FB_EN, 0);
2817 /* blackout the MC */
2818 blackout &= ~BLACKOUT_MODE_MASK;
2819 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2821 /* wait for the MC to settle */
2824 /* lock double buffered regs */
2825 for (i = 0; i < rdev->num_crtc; i++) {
2826 if (save->crtc_enabled[i]) {
2827 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2828 if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2829 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2830 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2832 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2835 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
/* Undo evergreen_mc_stop(): repoint all crtc surfaces at the (possibly
 * relocated) start of VRAM, unlock the double-buffered registers and
 * wait for the pending surface update, unblackout the MC, re-enable CPU
 * framebuffer access, unblank the crtcs that were enabled, and restore
 * the saved VGA state.
 * NOTE(review): some closing braces/else branches are elided from this
 * view.
 */
2841 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2843 u32 tmp, frame_count;
2846 /* update crtc base addresses */
2847 for (i = 0; i < rdev->num_crtc; i++) {
2848 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2849 upper_32_bits(rdev->mc.vram_start));
2850 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2851 upper_32_bits(rdev->mc.vram_start));
2852 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2853 (u32)rdev->mc.vram_start);
2854 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2855 (u32)rdev->mc.vram_start);
2858 if (!ASIC_IS_NODCE(rdev)) {
2859 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2860 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2863 /* unlock regs and wait for update */
2864 for (i = 0; i < rdev->num_crtc; i++) {
2865 if (save->crtc_enabled[i]) {
/* force update mode 3 if not already set */
2866 tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2867 if ((tmp & 0x7) != 3) {
2870 WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2872 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2873 if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2874 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2875 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2877 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2880 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
/* wait for the surface update to land */
2882 for (j = 0; j < rdev->usec_timeout; j++) {
2883 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2884 if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2891 /* unblackout the MC */
2892 tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2893 tmp &= ~BLACKOUT_MODE_MASK;
2894 WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2895 /* allow CPU access */
2896 WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2898 for (i = 0; i < rdev->num_crtc; i++) {
2899 if (save->crtc_enabled[i]) {
2900 if (ASIC_IS_DCE6(rdev)) {
/* DCE6+: unblank via CRTC_BLANK_CONTROL */
2901 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2902 tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
2903 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2904 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2905 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
/* pre-DCE6: re-enable display read requests */
2907 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2908 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2909 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2910 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2911 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2913 /* wait for the next frame */
2914 frame_count = radeon_get_vblank_counter(rdev, i);
2915 for (j = 0; j < rdev->usec_timeout; j++) {
2916 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2922 if (!ASIC_IS_NODCE(rdev)) {
2923 /* Unlock vga access */
2924 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2926 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
/* Program the memory controller apertures: stop the display/MC via
 * evergreen_mc_stop(), set the system aperture around VRAM (and GTT for
 * AGP), program FB location, HDP nonsurface range and AGP base/top,
 * then resume via evergreen_mc_resume() and disable the VGA renderer.
 */
2930 void evergreen_mc_program(struct radeon_device *rdev)
2932 struct evergreen_mc_save save;
2936 /* Initialize HDP */
2937 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2938 WREG32((0x2c14 + j), 0x00000000);
2939 WREG32((0x2c18 + j), 0x00000000);
2940 WREG32((0x2c1c + j), 0x00000000);
2941 WREG32((0x2c20 + j), 0x00000000);
2942 WREG32((0x2c24 + j), 0x00000000);
2944 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2946 evergreen_mc_stop(rdev, &save);
2947 if (evergreen_mc_wait_for_idle(rdev)) {
2948 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2950 /* Lockout access through VGA aperture*/
2951 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2952 /* Update configuration */
2953 if (rdev->flags & RADEON_IS_AGP) {
/* the system aperture must span both VRAM and the AGP/GTT range */
2954 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2955 /* VRAM before AGP */
2956 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2957 rdev->mc.vram_start >> 12);
2958 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2959 rdev->mc.gtt_end >> 12);
2961 /* VRAM after AGP */
2962 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2963 rdev->mc.gtt_start >> 12);
2964 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2965 rdev->mc.vram_end >> 12);
/* non-AGP: aperture covers VRAM only */
2968 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2969 rdev->mc.vram_start >> 12);
2970 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2971 rdev->mc.vram_end >> 12);
2973 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2974 /* llano/ontario only */
2975 if ((rdev->family == CHIP_PALM) ||
2976 (rdev->family == CHIP_SUMO) ||
2977 (rdev->family == CHIP_SUMO2)) {
2978 tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2979 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2980 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2981 WREG32(MC_FUS_VM_FB_OFFSET, tmp);
/* FB location: end in the high half, start in the low half */
2983 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2984 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2985 WREG32(MC_VM_FB_LOCATION, tmp);
2986 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2987 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2988 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2989 if (rdev->flags & RADEON_IS_AGP) {
2990 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2991 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2992 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
/* disable the AGP aperture (BOT > TOP) */
2994 WREG32(MC_VM_AGP_BASE, 0);
2995 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2996 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2998 if (evergreen_mc_wait_for_idle(rdev)) {
2999 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
3001 evergreen_mc_resume(rdev, &save);
3002 /* we need to own VRAM, so turn off the VGA renderer here
3003 * to stop it overwriting our objects */
3004 rv515_vga_render_disable(rdev);
/* Schedule an indirect buffer on the GFX ring: switch the CP to
 * DX10/11 mode, emit the rptr writeback (to the rptr save register or
 * to the writeback buffer), then emit the INDIRECT_BUFFER packet with
 * the IB's GPU address and length.
 */
3010 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3012 struct radeon_ring *ring = &rdev->ring[ib->ring];
3015 /* set to DX10/11 mode */
3016 radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
3017 radeon_ring_write(ring, 1);
3019 if (ring->rptr_save_reg) {
/* +3 for this SET_CONFIG_REG packet, +4 for the IB packet below */
3020 next_rptr = ring->wptr + 3 + 4;
3021 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3022 radeon_ring_write(ring, ((ring->rptr_save_reg -
3023 PACKET3_SET_CONFIG_REG_START) >> 2));
3024 radeon_ring_write(ring, next_rptr);
3025 } else if (rdev->wb.enabled) {
/* +5 for the MEM_WRITE packet, +4 for the IB packet below */
3026 next_rptr = ring->wptr + 5 + 4;
3027 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
3028 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3029 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
3030 radeon_ring_write(ring, next_rptr);
3031 radeon_ring_write(ring, 0);
3034 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
3035 radeon_ring_write(ring,
3039 (ib->gpu_addr & 0xFFFFFFFC));
3040 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3041 radeon_ring_write(ring, ib->length_dw);
/* Upload the PFP and ME microcode (big-endian firmware words) to the
 * CP. Requires rdev->me_fw and rdev->pfp_fw to have been loaded;
 * the error return for missing firmware is elided from this view.
 */
3045 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
3047 const __be32 *fw_data;
3050 if (!rdev->me_fw || !rdev->pfp_fw)
3058 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
/* stream the PFP ucode, resetting the write address around it */
3060 fw_data = (const __be32 *)rdev->pfp_fw->data;
3061 WREG32(CP_PFP_UCODE_ADDR, 0);
3062 for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
3063 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3064 WREG32(CP_PFP_UCODE_ADDR, 0);
/* stream the ME (PM4) ucode */
3066 fw_data = (const __be32 *)rdev->me_fw->data;
3067 WREG32(CP_ME_RAM_WADDR, 0);
3068 for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
3069 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3071 WREG32(CP_PFP_UCODE_ADDR, 0);
3072 WREG32(CP_ME_RAM_WADDR, 0);
3073 WREG32(CP_ME_RAM_RADDR, 0);
/* Initialize the CP: emit ME_INITIALIZE, enable the ME, then emit the
 * default golden context state, the clear-state packets and a few
 * initial register writes. Returns 0 on success or the ring-lock error
 * (error returns are elided from this view).
 */
3077 static int evergreen_cp_start(struct radeon_device *rdev)
3079 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3083 r = radeon_ring_lock(rdev, ring, 7);
3085 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3088 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3089 radeon_ring_write(ring, 0x1);
3090 radeon_ring_write(ring, 0x0);
3091 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
3092 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3093 radeon_ring_write(ring, 0);
3094 radeon_ring_write(ring, 0);
3095 radeon_ring_unlock_commit(rdev, ring, false);
3098 WREG32(CP_ME_CNTL, cp_me);
/* room for the default state plus the fixed packets below */
3100 r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
3102 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3106 /* setup clear context state */
3107 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3108 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3110 for (i = 0; i < evergreen_default_size; i++)
3111 radeon_ring_write(ring, evergreen_default_state[i]);
3113 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3114 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3116 /* set clear context state */
3117 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3118 radeon_ring_write(ring, 0);
3120 /* SQ_VTX_BASE_VTX_LOC */
3121 radeon_ring_write(ring, 0xc0026f00);
3122 radeon_ring_write(ring, 0x00000000);
3123 radeon_ring_write(ring, 0x00000000);
3124 radeon_ring_write(ring, 0x00000000);
3127 radeon_ring_write(ring, 0xc0036f00);
3128 radeon_ring_write(ring, 0x00000bc4);
3129 radeon_ring_write(ring, 0xffffffff);
3130 radeon_ring_write(ring, 0xffffffff);
3131 radeon_ring_write(ring, 0xffffffff);
3133 radeon_ring_write(ring, 0xc0026900);
3134 radeon_ring_write(ring, 0x00000316);
3135 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3136 radeon_ring_write(ring, 0x00000010); /* */
3138 radeon_ring_unlock_commit(rdev, ring, false);
/* Bring up the CP ring buffer: soft-reset the CP (and dependent
 * blocks), program the ring size, read/write pointers and writeback
 * addresses, set the ring base, start the CP via evergreen_cp_start()
 * and run a ring test. Returns the ring-test result (return paths are
 * elided from this view).
 */
3143 static int evergreen_cp_resume(struct radeon_device *rdev)
3145 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3150 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
3151 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
/* read back to post the reset, then release it */
3157 RREG32(GRBM_SOFT_RESET);
3159 WREG32(GRBM_SOFT_RESET, 0);
3160 RREG32(GRBM_SOFT_RESET);
3162 /* Set ring buffer size */
3163 rb_bufsz = order_base_2(ring->ring_size / 8);
3164 tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
/* big-endian hosts need the swap bit (conditional elided) */
3166 tmp |= BUF_SWAP_32BIT;
3168 WREG32(CP_RB_CNTL, tmp);
3169 WREG32(CP_SEM_WAIT_TIMER, 0x0);
3170 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3172 /* Set the write pointer delay */
3173 WREG32(CP_RB_WPTR_DELAY, 0);
3175 /* Initialize the ring buffer's read and write pointers */
3176 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
3177 WREG32(CP_RB_RPTR_WR, 0);
3179 WREG32(CP_RB_WPTR, ring->wptr);
3181 /* set the wb address whether it's enabled or not */
3182 WREG32(CP_RB_RPTR_ADDR,
3183 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
3184 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3185 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3187 if (rdev->wb.enabled)
3188 WREG32(SCRATCH_UMSK, 0xff);
/* no writeback: disable rptr updates and scratch writeback */
3190 tmp |= RB_NO_UPDATE;
3191 WREG32(SCRATCH_UMSK, 0);
3195 WREG32(CP_RB_CNTL, tmp);
3197 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
3198 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
3200 evergreen_cp_start(rdev);
3202 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3204 ring->ready = false;
3213 static void evergreen_gpu_init(struct radeon_device *rdev)
/*
 * One-time gfx-block bring-up for Evergreen/Northern-Islands-class parts:
 * fills rdev->config.evergreen with per-family hardware limits, derives the
 * tiling config and the disabled-RB/SIMD masks from fuses and straps, then
 * programs the SQ/SX/SC/VGT/CB default register state.
 * NOTE(review): this listing elides lines (case labels, break statements,
 * closing braces); each limit group below is attributed to a family by its
 * *_GB_ADDR_CONFIG_GOLDEN constant — confirm the elided case labels.
 */
3216 u32 mc_shared_chmap, mc_arb_ramcfg;
3220 u32 sq_lds_resource_mgmt;
3221 u32 sq_gpr_resource_mgmt_1;
3222 u32 sq_gpr_resource_mgmt_2;
3223 u32 sq_gpr_resource_mgmt_3;
3224 u32 sq_thread_resource_mgmt;
3225 u32 sq_thread_resource_mgmt_2;
3226 u32 sq_stack_resource_mgmt_1;
3227 u32 sq_stack_resource_mgmt_2;
3228 u32 sq_stack_resource_mgmt_3;
3229 u32 vgt_cache_invalidation;
3230 u32 hdp_host_path_cntl, tmp;
3231 u32 disabled_rb_mask;
3232 int i, j, ps_thread_count;
/* Per-family shader-engine / pipe / SIMD / FIFO limits. */
3234 switch (rdev->family) {
/* Cypress/Hemlock-class limits (CYPRESS golden addr config below). */
3237 rdev->config.evergreen.num_ses = 2;
3238 rdev->config.evergreen.max_pipes = 4;
3239 rdev->config.evergreen.max_tile_pipes = 8;
3240 rdev->config.evergreen.max_simds = 10;
3241 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3242 rdev->config.evergreen.max_gprs = 256;
3243 rdev->config.evergreen.max_threads = 248;
3244 rdev->config.evergreen.max_gs_threads = 32;
3245 rdev->config.evergreen.max_stack_entries = 512;
3246 rdev->config.evergreen.sx_num_of_sets = 4;
3247 rdev->config.evergreen.sx_max_export_size = 256;
3248 rdev->config.evergreen.sx_max_export_pos_size = 64;
3249 rdev->config.evergreen.sx_max_export_smx_size = 192;
3250 rdev->config.evergreen.max_hw_contexts = 8;
3251 rdev->config.evergreen.sq_num_cf_insts = 2;
3253 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3254 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3255 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3256 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
/* Juniper-class limits. */
3259 rdev->config.evergreen.num_ses = 1;
3260 rdev->config.evergreen.max_pipes = 4;
3261 rdev->config.evergreen.max_tile_pipes = 4;
3262 rdev->config.evergreen.max_simds = 10;
3263 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3264 rdev->config.evergreen.max_gprs = 256;
3265 rdev->config.evergreen.max_threads = 248;
3266 rdev->config.evergreen.max_gs_threads = 32;
3267 rdev->config.evergreen.max_stack_entries = 512;
3268 rdev->config.evergreen.sx_num_of_sets = 4;
3269 rdev->config.evergreen.sx_max_export_size = 256;
3270 rdev->config.evergreen.sx_max_export_pos_size = 64;
3271 rdev->config.evergreen.sx_max_export_smx_size = 192;
3272 rdev->config.evergreen.max_hw_contexts = 8;
3273 rdev->config.evergreen.sq_num_cf_insts = 2;
3275 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3276 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3277 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3278 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
/* Redwood-class limits. */
3281 rdev->config.evergreen.num_ses = 1;
3282 rdev->config.evergreen.max_pipes = 4;
3283 rdev->config.evergreen.max_tile_pipes = 4;
3284 rdev->config.evergreen.max_simds = 5;
3285 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3286 rdev->config.evergreen.max_gprs = 256;
3287 rdev->config.evergreen.max_threads = 248;
3288 rdev->config.evergreen.max_gs_threads = 32;
3289 rdev->config.evergreen.max_stack_entries = 256;
3290 rdev->config.evergreen.sx_num_of_sets = 4;
3291 rdev->config.evergreen.sx_max_export_size = 256;
3292 rdev->config.evergreen.sx_max_export_pos_size = 64;
3293 rdev->config.evergreen.sx_max_export_smx_size = 192;
3294 rdev->config.evergreen.max_hw_contexts = 8;
3295 rdev->config.evergreen.sq_num_cf_insts = 2;
3297 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3298 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3299 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3300 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
/* Cedar-class limits (smallest discrete part). */
3304 rdev->config.evergreen.num_ses = 1;
3305 rdev->config.evergreen.max_pipes = 2;
3306 rdev->config.evergreen.max_tile_pipes = 2;
3307 rdev->config.evergreen.max_simds = 2;
3308 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3309 rdev->config.evergreen.max_gprs = 256;
3310 rdev->config.evergreen.max_threads = 192;
3311 rdev->config.evergreen.max_gs_threads = 16;
3312 rdev->config.evergreen.max_stack_entries = 256;
3313 rdev->config.evergreen.sx_num_of_sets = 4;
3314 rdev->config.evergreen.sx_max_export_size = 128;
3315 rdev->config.evergreen.sx_max_export_pos_size = 32;
3316 rdev->config.evergreen.sx_max_export_smx_size = 96;
3317 rdev->config.evergreen.max_hw_contexts = 4;
3318 rdev->config.evergreen.sq_num_cf_insts = 1;
3320 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3321 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3322 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3323 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
/* Cedar-like limits; reuses the Cedar golden config (presumably the
 * PALM IGP case — the elided case label should be confirmed). */
3326 rdev->config.evergreen.num_ses = 1;
3327 rdev->config.evergreen.max_pipes = 2;
3328 rdev->config.evergreen.max_tile_pipes = 2;
3329 rdev->config.evergreen.max_simds = 2;
3330 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3331 rdev->config.evergreen.max_gprs = 256;
3332 rdev->config.evergreen.max_threads = 192;
3333 rdev->config.evergreen.max_gs_threads = 16;
3334 rdev->config.evergreen.max_stack_entries = 256;
3335 rdev->config.evergreen.sx_num_of_sets = 4;
3336 rdev->config.evergreen.sx_max_export_size = 128;
3337 rdev->config.evergreen.sx_max_export_pos_size = 32;
3338 rdev->config.evergreen.sx_max_export_smx_size = 96;
3339 rdev->config.evergreen.max_hw_contexts = 4;
3340 rdev->config.evergreen.sq_num_cf_insts = 1;
3342 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3343 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3344 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3345 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
/* Sumo-class IGP limits; SIMD count is further refined by PCI device id. */
3348 rdev->config.evergreen.num_ses = 1;
3349 rdev->config.evergreen.max_pipes = 4;
3350 rdev->config.evergreen.max_tile_pipes = 4;
3351 if (rdev->pdev->device == 0x9648)
3352 rdev->config.evergreen.max_simds = 3;
3353 else if ((rdev->pdev->device == 0x9647) ||
3354 (rdev->pdev->device == 0x964a))
3355 rdev->config.evergreen.max_simds = 4;
3357 rdev->config.evergreen.max_simds = 5;
3358 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3359 rdev->config.evergreen.max_gprs = 256;
3360 rdev->config.evergreen.max_threads = 248;
3361 rdev->config.evergreen.max_gs_threads = 32;
3362 rdev->config.evergreen.max_stack_entries = 256;
3363 rdev->config.evergreen.sx_num_of_sets = 4;
3364 rdev->config.evergreen.sx_max_export_size = 256;
3365 rdev->config.evergreen.sx_max_export_pos_size = 64;
3366 rdev->config.evergreen.sx_max_export_smx_size = 192;
3367 rdev->config.evergreen.max_hw_contexts = 8;
3368 rdev->config.evergreen.sq_num_cf_insts = 2;
3370 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3371 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3372 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3373 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
/* Sumo2-class IGP limits. */
3376 rdev->config.evergreen.num_ses = 1;
3377 rdev->config.evergreen.max_pipes = 4;
3378 rdev->config.evergreen.max_tile_pipes = 4;
3379 rdev->config.evergreen.max_simds = 2;
3380 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3381 rdev->config.evergreen.max_gprs = 256;
3382 rdev->config.evergreen.max_threads = 248;
3383 rdev->config.evergreen.max_gs_threads = 32;
3384 rdev->config.evergreen.max_stack_entries = 512;
3385 rdev->config.evergreen.sx_num_of_sets = 4;
3386 rdev->config.evergreen.sx_max_export_size = 256;
3387 rdev->config.evergreen.sx_max_export_pos_size = 64;
3388 rdev->config.evergreen.sx_max_export_smx_size = 192;
3389 rdev->config.evergreen.max_hw_contexts = 4;
3390 rdev->config.evergreen.sq_num_cf_insts = 2;
3392 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3393 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3394 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3395 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
/* Barts-class limits. */
3398 rdev->config.evergreen.num_ses = 2;
3399 rdev->config.evergreen.max_pipes = 4;
3400 rdev->config.evergreen.max_tile_pipes = 8;
3401 rdev->config.evergreen.max_simds = 7;
3402 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3403 rdev->config.evergreen.max_gprs = 256;
3404 rdev->config.evergreen.max_threads = 248;
3405 rdev->config.evergreen.max_gs_threads = 32;
3406 rdev->config.evergreen.max_stack_entries = 512;
3407 rdev->config.evergreen.sx_num_of_sets = 4;
3408 rdev->config.evergreen.sx_max_export_size = 256;
3409 rdev->config.evergreen.sx_max_export_pos_size = 64;
3410 rdev->config.evergreen.sx_max_export_smx_size = 192;
3411 rdev->config.evergreen.max_hw_contexts = 8;
3412 rdev->config.evergreen.sq_num_cf_insts = 2;
3414 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3415 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3416 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3417 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
/* Turks-class limits. */
3420 rdev->config.evergreen.num_ses = 1;
3421 rdev->config.evergreen.max_pipes = 4;
3422 rdev->config.evergreen.max_tile_pipes = 4;
3423 rdev->config.evergreen.max_simds = 6;
3424 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3425 rdev->config.evergreen.max_gprs = 256;
3426 rdev->config.evergreen.max_threads = 248;
3427 rdev->config.evergreen.max_gs_threads = 32;
3428 rdev->config.evergreen.max_stack_entries = 256;
3429 rdev->config.evergreen.sx_num_of_sets = 4;
3430 rdev->config.evergreen.sx_max_export_size = 256;
3431 rdev->config.evergreen.sx_max_export_pos_size = 64;
3432 rdev->config.evergreen.sx_max_export_smx_size = 192;
3433 rdev->config.evergreen.max_hw_contexts = 8;
3434 rdev->config.evergreen.sq_num_cf_insts = 2;
3436 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3437 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3438 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3439 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
/* Caicos-class limits. */
3442 rdev->config.evergreen.num_ses = 1;
3443 rdev->config.evergreen.max_pipes = 2;
3444 rdev->config.evergreen.max_tile_pipes = 2;
3445 rdev->config.evergreen.max_simds = 2;
3446 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3447 rdev->config.evergreen.max_gprs = 256;
3448 rdev->config.evergreen.max_threads = 192;
3449 rdev->config.evergreen.max_gs_threads = 16;
3450 rdev->config.evergreen.max_stack_entries = 256;
3451 rdev->config.evergreen.sx_num_of_sets = 4;
3452 rdev->config.evergreen.sx_max_export_size = 128;
3453 rdev->config.evergreen.sx_max_export_pos_size = 32;
3454 rdev->config.evergreen.sx_max_export_smx_size = 96;
3455 rdev->config.evergreen.max_hw_contexts = 4;
3456 rdev->config.evergreen.sq_num_cf_insts = 1;
3458 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3459 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3460 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3461 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3465 /* Initialize HDP */
3466 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3467 WREG32((0x2c14 + j), 0x00000000);
3468 WREG32((0x2c18 + j), 0x00000000);
3469 WREG32((0x2c1c + j), 0x00000000);
3470 WREG32((0x2c20 + j), 0x00000000);
3471 WREG32((0x2c24 + j), 0x00000000);
/* Give GRBM register reads a timeout instead of hanging the bus. */
3474 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3475 WREG32(SRBM_INT_CNTL, 0x1);
3476 WREG32(SRBM_INT_ACK, 0x1);
3478 evergreen_fix_pci_max_read_req_size(rdev);
/* Fusion IGPs expose the ARB ramcfg at a different (FUS_) offset. */
3480 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3481 if ((rdev->family == CHIP_PALM) ||
3482 (rdev->family == CHIP_SUMO) ||
3483 (rdev->family == CHIP_SUMO2))
3484 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3486 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3488 /* setup tiling info dword. gb_addr_config is not adequate since it does
3489 * not have bank info, so create a custom tiling dword.
3490 * bits 3:0 num_pipes
3491 * bits 7:4 num_banks
3492 * bits 11:8 group_size
3493 * bits 15:12 row_size
3495 rdev->config.evergreen.tile_config = 0;
3496 switch (rdev->config.evergreen.max_tile_pipes) {
3499 rdev->config.evergreen.tile_config |= (0 << 0);
3502 rdev->config.evergreen.tile_config |= (1 << 0);
3505 rdev->config.evergreen.tile_config |= (2 << 0);
3508 rdev->config.evergreen.tile_config |= (3 << 0);
3511 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3512 if (rdev->flags & RADEON_IS_IGP)
3513 rdev->config.evergreen.tile_config |= 1 << 4;
3515 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3516 case 0: /* four banks */
3517 rdev->config.evergreen.tile_config |= 0 << 4;
3519 case 1: /* eight banks */
3520 rdev->config.evergreen.tile_config |= 1 << 4;
3522 case 2: /* sixteen banks */
3524 rdev->config.evergreen.tile_config |= 2 << 4;
/* group_size fixed to 256B encoding; row_size taken from gb_addr_config. */
3528 rdev->config.evergreen.tile_config |= 0 << 8;
3529 rdev->config.evergreen.tile_config |=
3530 ((gb_addr_config & 0x30000000) >> 28) << 12;
/* Original Evergreen parts fuse off RBs; read the straps via RCU. */
3532 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3536 efuse_straps_4 = RREG32_RCU(0x204);
3537 efuse_straps_3 = RREG32_RCU(0x203);
3538 tmp = (((efuse_straps_4 & 0xf) << 4) |
3539 ((efuse_straps_3 & 0xf0000000) >> 28));
/* Collect the per-SE disabled-RB bitmaps into one mask. */
3542 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3543 u32 rb_disable_bitmap;
3545 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3546 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3547 rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3549 tmp |= rb_disable_bitmap;
3552 /* enabled rb are just the one not disabled :) */
3553 disabled_rb_mask = tmp;
3555 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3557 /* if all the backends are disabled, fix it up here */
3558 if ((disabled_rb_mask & tmp) == tmp) {
3559 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3560 disabled_rb_mask &= ~(1 << i);
/* Count active SIMDs the same way, masking off non-existent ones. */
3563 for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3564 u32 simd_disable_bitmap;
3566 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3567 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3568 simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3569 simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3571 tmp |= simd_disable_bitmap;
3573 rdev->config.evergreen.active_simds = hweight32(~tmp);
/* Back to broadcast so the address-config writes hit every instance. */
3575 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3576 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3578 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3579 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3580 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3581 WREG32(DMA_TILING_CONFIG, gb_addr_config);
3582 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3583 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3584 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
/* Single-backend IGPs: pick the RB that is actually enabled. */
3586 if ((rdev->config.evergreen.max_backends == 1) &&
3587 (rdev->flags & RADEON_IS_IGP)) {
3588 if ((disabled_rb_mask & 3) == 1) {
3589 /* RB0 disabled, RB1 enabled */
3592 /* RB1 disabled, RB0 enabled */
3596 tmp = gb_addr_config & NUM_PIPES_MASK;
3597 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3598 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3600 WREG32(GB_BACKEND_MAP, tmp);
/* Enable all texture cache channels. */
3602 WREG32(CGTS_SYS_TCC_DISABLE, 0);
3603 WREG32(CGTS_TCC_DISABLE, 0);
3604 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3605 WREG32(CGTS_USER_TCC_DISABLE, 0);
3607 /* set HW defaults for 3D engine */
3608 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3609 ROQ_IB2_START(0x2b)));
3611 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3613 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3618 sx_debug_1 = RREG32(SX_DEBUG_1);
3619 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3620 WREG32(SX_DEBUG_1, sx_debug_1);
/* Program the SMX working-set count from the per-family limit. */
3623 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3624 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3625 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3626 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3628 if (rdev->family <= CHIP_SUMO2)
3629 WREG32(SMX_SAR_CTL0, 0x00010000);
/* Export buffer sizes are programmed in 4-dword units, minus one. */
3631 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3632 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3633 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3635 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3636 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3637 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3639 WREG32(VGT_NUM_INSTANCES, 1);
3640 WREG32(SPI_CONFIG_CNTL, 0);
3641 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3642 WREG32(CP_PERFMON_CNTL, 0);
3644 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3645 FETCH_FIFO_HIWATER(0x4) |
3646 DONE_FIFO_HIWATER(0xe0) |
3647 ALU_UPDATE_FIFO_HIWATER(0x8)));
3649 sq_config = RREG32(SQ_CONFIG);
3650 sq_config &= ~(PS_PRIO(3) |
3654 sq_config |= (VC_ENABLE |
/* Families without a vertex cache clear VC_ENABLE again below. */
3661 switch (rdev->family) {
3667 /* no vertex cache */
3668 sq_config &= ~VC_ENABLE;
3674 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
/* Split GPRs between shader stages: 4*2 reserved as clause temps, the
 * remainder divided 12/6/4/4/3/3 (PS/VS/GS/ES/HS/LS) out of 32 parts. */
3676 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3677 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3678 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3679 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3680 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3681 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3682 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3684 switch (rdev->family) {
3689 ps_thread_count = 96;
3692 ps_thread_count = 128;
/* Remaining threads are shared equally (rounded down to 8) among the
 * five non-PS stages. */
3696 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3697 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3698 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3699 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3700 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3701 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
/* Stack entries split evenly (1/6 each) across the six stages. */
3703 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3704 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3705 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3706 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3707 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3708 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3710 WREG32(SQ_CONFIG, sq_config);
3711 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3712 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3713 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3714 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3715 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3716 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3717 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3718 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3719 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3720 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3722 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3723 FORCE_EOV_MAX_REZ_CNT(255)));
/* VC-less families invalidate the TC only; the rest VC and TC. */
3725 switch (rdev->family) {
3731 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3734 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3737 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3738 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3740 WREG32(VGT_GS_VERTEX_REUSE, 16);
3741 WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3742 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3744 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3745 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
/* Zero the CB performance counter selects. */
3747 WREG32(CB_PERF_CTR0_SEL_0, 0);
3748 WREG32(CB_PERF_CTR0_SEL_1, 0);
3749 WREG32(CB_PERF_CTR1_SEL_0, 0);
3750 WREG32(CB_PERF_CTR1_SEL_1, 0);
3751 WREG32(CB_PERF_CTR2_SEL_0, 0);
3752 WREG32(CB_PERF_CTR2_SEL_1, 0);
3753 WREG32(CB_PERF_CTR3_SEL_0, 0);
3754 WREG32(CB_PERF_CTR3_SEL_1, 0);
3756 /* clear render buffer base addresses */
3757 WREG32(CB_COLOR0_BASE, 0);
3758 WREG32(CB_COLOR1_BASE, 0);
3759 WREG32(CB_COLOR2_BASE, 0);
3760 WREG32(CB_COLOR3_BASE, 0);
3761 WREG32(CB_COLOR4_BASE, 0);
3762 WREG32(CB_COLOR5_BASE, 0);
3763 WREG32(CB_COLOR6_BASE, 0);
3764 WREG32(CB_COLOR7_BASE, 0);
3765 WREG32(CB_COLOR8_BASE, 0);
3766 WREG32(CB_COLOR9_BASE, 0);
3767 WREG32(CB_COLOR10_BASE, 0);
3768 WREG32(CB_COLOR11_BASE, 0);
3770 /* set the shader const cache sizes to 0 */
3771 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3773 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
/* Make HDP flushes also invalidate its cache. */
3776 tmp = RREG32(HDP_MISC_CNTL);
3777 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3778 WREG32(HDP_MISC_CNTL, tmp);
3780 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3781 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3783 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3789 int evergreen_mc_init(struct radeon_device *rdev)
/*
 * Probe the memory controller: derive VRAM bus width from channel size and
 * channel count, read VRAM/aperture sizes, and hand placement off to the
 * r700 VRAM/GTT location helper.
 * NOTE(review): listing elides lines (chansize/numchan assignments, the
 * NOOFCHAN case bodies, closing braces); comments below describe only the
 * visible statements.
 */
3792 int chansize, numchan;
3794 /* Get VRAM informations */
3795 rdev->mc.vram_is_ddr = true;
/* Fusion IGPs read the ARB ramcfg at the FUS_ offset. */
3796 if ((rdev->family == CHIP_PALM) ||
3797 (rdev->family == CHIP_SUMO) ||
3798 (rdev->family == CHIP_SUMO2))
3799 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3801 tmp = RREG32(MC_ARB_RAMCFG);
/* Channel size (bits) from override/chansize fields; values elided here. */
3802 if (tmp & CHANSIZE_OVERRIDE) {
3804 } else if (tmp & CHANSIZE_MASK) {
3809 tmp = RREG32(MC_SHARED_CHMAP);
3810 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3825 rdev->mc.vram_width = numchan * chansize;
3826 /* Could aper size report 0 ? */
3827 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3828 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3829 /* Setup GPU memory space */
3830 if ((rdev->family == CHIP_PALM) ||
3831 (rdev->family == CHIP_SUMO) ||
3832 (rdev->family == CHIP_SUMO2)) {
3833 /* size in bytes on fusion */
3834 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3835 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3837 /* size in MB on evergreen/cayman/tn */
3838 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3839 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3841 rdev->mc.visible_vram_size = rdev->mc.aper_size;
3842 r700_vram_gtt_location(rdev, &rdev->mc);
3843 radeon_update_bandwidth_info(rdev);
3848 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
/*
 * Dump the GRBM/SRBM/CP/DMA status registers to the kernel log; used by the
 * reset paths for post-mortem debugging. Cayman+ parts have a second DMA
 * engine whose status block sits 0x800 above the first.
 */
3850 dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3851 RREG32(GRBM_STATUS));
3852 dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3853 RREG32(GRBM_STATUS_SE0));
3854 dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3855 RREG32(GRBM_STATUS_SE1));
3856 dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3857 RREG32(SRBM_STATUS));
3858 dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3859 RREG32(SRBM_STATUS2));
3860 dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3861 RREG32(CP_STALLED_STAT1));
3862 dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3863 RREG32(CP_STALLED_STAT2));
3864 dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3865 RREG32(CP_BUSY_STAT));
3866 dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3868 dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3869 RREG32(DMA_STATUS_REG));
3870 if (rdev->family >= CHIP_CAYMAN) {
3871 dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3872 RREG32(DMA_STATUS_REG + 0x800));
3876 bool evergreen_is_display_hung(struct radeon_device *rdev)
/*
 * Heuristic display-hang check: snapshot the HV counter of every enabled
 * CRTC, then re-sample up to 10 times; a CRTC whose counter changes is
 * clearly still scanning out and is dropped from the hung set.
 * NOTE(review): the inter-sample delay and the final return of the
 * remaining crtc_hung state are elided from this listing — confirm there.
 */
3882 for (i = 0; i < rdev->num_crtc; i++) {
3883 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3884 crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3885 crtc_hung |= (1 << i);
3889 for (j = 0; j < 10; j++) {
3890 for (i = 0; i < rdev->num_crtc; i++) {
3891 if (crtc_hung & (1 << i)) {
/* Counter advanced since the snapshot -> this CRTC is alive. */
3892 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3893 if (tmp != crtc_status[i])
3894 crtc_hung &= ~(1 << i);
3905 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
/*
 * Inspect the GRBM/SRBM/DMA/VM status registers and translate busy/pending
 * bits into a RADEON_RESET_* mask describing which engines need a soft
 * reset. Returns the accumulated mask (return statement elided in this
 * listing).
 */
3911 tmp = RREG32(GRBM_STATUS);
3912 if (tmp & (PA_BUSY | SC_BUSY |
3914 TA_BUSY | VGT_BUSY |
3916 SPI_BUSY | VGT_BUSY_NO_DMA))
3917 reset_mask |= RADEON_RESET_GFX;
3919 if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3920 CP_BUSY | CP_COHERENCY_BUSY))
3921 reset_mask |= RADEON_RESET_CP;
3923 if (tmp & GRBM_EE_BUSY)
3924 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3926 /* DMA_STATUS_REG */
3927 tmp = RREG32(DMA_STATUS_REG);
3928 if (!(tmp & DMA_IDLE))
3929 reset_mask |= RADEON_RESET_DMA;
3932 tmp = RREG32(SRBM_STATUS2);
3934 reset_mask |= RADEON_RESET_DMA;
3937 tmp = RREG32(SRBM_STATUS);
3938 if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3939 reset_mask |= RADEON_RESET_RLC;
/* IH/SEM/VMC conditions tested here are elided from this listing. */
3942 reset_mask |= RADEON_RESET_IH;
3945 reset_mask |= RADEON_RESET_SEM;
3947 if (tmp & GRBM_RQ_PENDING)
3948 reset_mask |= RADEON_RESET_GRBM;
3951 reset_mask |= RADEON_RESET_VMC;
3953 if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3954 MCC_BUSY | MCD_BUSY))
3955 reset_mask |= RADEON_RESET_MC;
3957 if (evergreen_is_display_hung(rdev))
3958 reset_mask |= RADEON_RESET_DISPLAY;
3961 tmp = RREG32(VM_L2_STATUS);
3963 reset_mask |= RADEON_RESET_VMC;
3965 /* Skip MC reset as it's mostly likely not hung, just busy */
3966 if (reset_mask & RADEON_RESET_MC) {
3967 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3968 reset_mask &= ~RADEON_RESET_MC;
3974 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
/*
 * Soft-reset the engines selected in @reset_mask: halt CP and DMA, quiesce
 * the MC, pulse the chosen bits in GRBM_SOFT_RESET / SRBM_SOFT_RESET, then
 * restore MC state and dump status registers again. Delays between the
 * set/clear writes are elided from this listing.
 */
3976 struct evergreen_mc_save save;
3977 u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3980 if (reset_mask == 0)
3983 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3985 evergreen_print_gpu_status_regs(rdev);
3987 /* Disable CP parsing/prefetching */
3988 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
3990 if (reset_mask & RADEON_RESET_DMA) {
/* Stop the DMA ring before resetting the engine. */
3992 tmp = RREG32(DMA_RB_CNTL);
3993 tmp &= ~DMA_RB_ENABLE;
3994 WREG32(DMA_RB_CNTL, tmp);
3999 evergreen_mc_stop(rdev, &save);
4000 if (evergreen_mc_wait_for_idle(rdev)) {
4001 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
/* Translate the reset mask into GRBM (gfx-side) reset bits... */
4004 if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
4005 grbm_soft_reset |= SOFT_RESET_DB |
4018 if (reset_mask & RADEON_RESET_CP) {
4019 grbm_soft_reset |= SOFT_RESET_CP |
4022 srbm_soft_reset |= SOFT_RESET_GRBM;
/* ...and into SRBM (system-side) reset bits. */
4025 if (reset_mask & RADEON_RESET_DMA)
4026 srbm_soft_reset |= SOFT_RESET_DMA;
4028 if (reset_mask & RADEON_RESET_DISPLAY)
4029 srbm_soft_reset |= SOFT_RESET_DC;
4031 if (reset_mask & RADEON_RESET_RLC)
4032 srbm_soft_reset |= SOFT_RESET_RLC;
4034 if (reset_mask & RADEON_RESET_SEM)
4035 srbm_soft_reset |= SOFT_RESET_SEM;
4037 if (reset_mask & RADEON_RESET_IH)
4038 srbm_soft_reset |= SOFT_RESET_IH;
4040 if (reset_mask & RADEON_RESET_GRBM)
4041 srbm_soft_reset |= SOFT_RESET_GRBM;
4043 if (reset_mask & RADEON_RESET_VMC)
4044 srbm_soft_reset |= SOFT_RESET_VMC;
/* IGPs share the MC with the CPU; never reset it there. */
4046 if (!(rdev->flags & RADEON_IS_IGP)) {
4047 if (reset_mask & RADEON_RESET_MC)
4048 srbm_soft_reset |= SOFT_RESET_MC;
/* Pulse GRBM reset bits: set, (delay elided), then clear. */
4051 if (grbm_soft_reset) {
4052 tmp = RREG32(GRBM_SOFT_RESET);
4053 tmp |= grbm_soft_reset;
4054 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
4055 WREG32(GRBM_SOFT_RESET, tmp);
4056 tmp = RREG32(GRBM_SOFT_RESET);
4060 tmp &= ~grbm_soft_reset;
4061 WREG32(GRBM_SOFT_RESET, tmp);
4062 tmp = RREG32(GRBM_SOFT_RESET);
/* Same set/clear pulse for the SRBM reset bits. */
4065 if (srbm_soft_reset) {
4066 tmp = RREG32(SRBM_SOFT_RESET);
4067 tmp |= srbm_soft_reset;
4068 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
4069 WREG32(SRBM_SOFT_RESET, tmp);
4070 tmp = RREG32(SRBM_SOFT_RESET);
4074 tmp &= ~srbm_soft_reset;
4075 WREG32(SRBM_SOFT_RESET, tmp);
4076 tmp = RREG32(SRBM_SOFT_RESET);
4079 /* Wait a little for things to settle down */
4082 evergreen_mc_resume(rdev, &save);
4085 evergreen_print_gpu_status_regs(rdev);
4088 void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
/*
 * Heavy-handed fallback reset through PCI config space: halt CP/DMA and
 * the RLC, put clocks in bypass, disable bus mastering and MC access,
 * trigger the config reset, then poll CONFIG_MEMSIZE until the ASIC
 * responds again (register reads return 0xffffffff while it is in reset).
 */
4090 struct evergreen_mc_save save;
4093 dev_info(rdev->dev, "GPU pci config reset\n");
4097 /* Disable CP parsing/prefetching */
4098 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
/* Stop the DMA ring. */
4101 tmp = RREG32(DMA_RB_CNTL);
4102 tmp &= ~DMA_RB_ENABLE;
4103 WREG32(DMA_RB_CNTL, tmp);
4104 /* XXX other engines? */
4107 r600_rlc_stop(rdev);
4111 /* set mclk/sclk to bypass */
4112 rv770_set_clk_bypass_mode(rdev);
4114 pci_disable_busmaster(rdev->pdev->dev.bsddev);
4115 /* disable mem access */
4116 evergreen_mc_stop(rdev, &save);
4117 if (evergreen_mc_wait_for_idle(rdev)) {
4118 dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
4121 radeon_pci_config_reset(rdev);
4122 /* wait for asic to come out of reset */
4123 for (i = 0; i < rdev->usec_timeout; i++) {
4124 if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
4130 int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
/*
 * Top-level ASIC reset: with @hard, go straight to the PCI config reset;
 * otherwise try a targeted soft reset first and escalate to the config
 * reset only if engines are still flagged hung (and the radeon_hard_reset
 * module knob allows it). BIOS scratch "engine hung" state is set while a
 * reset is needed and cleared once the check mask comes back clean.
 */
4135 evergreen_gpu_pci_config_reset(rdev);
4139 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4142 r600_set_bios_scratch_engine_hung(rdev, true);
4144 /* try soft reset */
4145 evergreen_gpu_soft_reset(rdev, reset_mask);
4147 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4149 /* try pci config reset */
4150 if (reset_mask && radeon_hard_reset)
4151 evergreen_gpu_pci_config_reset(rdev);
4153 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4156 r600_set_bios_scratch_engine_hung(rdev, false);
4162 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4164 * @rdev: radeon_device pointer
4165 * @ring: radeon_ring structure holding ring information
4167 * Check if the GFX engine is locked up.
4168 * Returns true if the engine appears to be locked up, false if not.
4170 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
/* If no GFX/compute/CP reset is indicated, refresh the lockup tracker and
 * report "not hung" (the return after the update is elided in this
 * listing); otherwise defer to the generic ring lockup test. */
4172 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4174 if (!(reset_mask & (RADEON_RESET_GFX |
4175 RADEON_RESET_COMPUTE |
4176 RADEON_RESET_CP))) {
4177 radeon_ring_lockup_update(rdev, ring);
4180 return radeon_ring_test_lockup(rdev, ring);
4186 #define RLC_SAVE_RESTORE_LIST_END_MARKER 0x00000000
4187 #define RLC_CLEAR_STATE_END_MARKER 0x00000001
4189 void sumo_rlc_fini(struct radeon_device *rdev)
/*
 * Tear down the RLC buffer objects (save/restore, clear-state, CP table):
 * reserve, unpin, unreserve, then drop the reference and NULL the pointer.
 * Reserve failures are only warned about; teardown proceeds regardless.
 */
4193 /* save restore block */
4194 if (rdev->rlc.save_restore_obj) {
4195 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4196 if (unlikely(r != 0))
4197 dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4198 radeon_bo_unpin(rdev->rlc.save_restore_obj);
4199 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4201 radeon_bo_unref(&rdev->rlc.save_restore_obj);
4202 rdev->rlc.save_restore_obj = NULL;
4205 /* clear state block */
4206 if (rdev->rlc.clear_state_obj) {
4207 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4208 if (unlikely(r != 0))
4209 dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4210 radeon_bo_unpin(rdev->rlc.clear_state_obj);
4211 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4213 radeon_bo_unref(&rdev->rlc.clear_state_obj);
4214 rdev->rlc.clear_state_obj = NULL;
4217 /* CP table block (comment previously said "clear state block" — copy-paste) */
4218 if (rdev->rlc.cp_table_obj) {
4219 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4220 if (unlikely(r != 0))
4221 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4222 radeon_bo_unpin(rdev->rlc.cp_table_obj);
4223 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4225 radeon_bo_unref(&rdev->rlc.cp_table_obj);
4226 rdev->rlc.cp_table_obj = NULL;
4230 #define CP_ME_TABLE_SIZE 96
4232 int sumo_rlc_init(struct radeon_device *rdev)
4235 volatile u32 *dst_ptr;
4236 u32 dws, data, i, j, k, reg_num;
4237 u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4238 u64 reg_list_mc_addr;
4239 const struct cs_section_def *cs_data;
4244 src_ptr = rdev->rlc.reg_list;
4245 dws = rdev->rlc.reg_list_size;
4246 if (rdev->family >= CHIP_BONAIRE) {
4247 dws += (5 * 16) + 48 + 48 + 64;
4249 cs_data = rdev->rlc.cs_data;
4252 /* save restore block */
4253 if (rdev->rlc.save_restore_obj == NULL) {
4254 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4255 RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4256 &rdev->rlc.save_restore_obj);
4258 dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4263 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4264 if (unlikely(r != 0)) {
4265 sumo_rlc_fini(rdev);
4268 r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4269 &rdev->rlc.save_restore_gpu_addr);
4271 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4272 dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4273 sumo_rlc_fini(rdev);
4277 r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void**)&vptr);
4279 dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4280 sumo_rlc_fini(rdev);
4283 rdev->rlc.sr_ptr = vptr;
4284 /* write the sr buffer */
4285 dst_ptr = rdev->rlc.sr_ptr;
4286 if (rdev->family >= CHIP_TAHITI) {
4288 for (i = 0; i < rdev->rlc.reg_list_size; i++)
4289 dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4293 * dw0: (reg2 << 16) | reg1
4294 * dw1: reg1 save space
4295 * dw2: reg2 save space
4297 for (i = 0; i < dws; i++) {
4298 data = src_ptr[i] >> 2;
4301 data |= (src_ptr[i] >> 2) << 16;
4302 j = (((i - 1) * 3) / 2);
4303 dst_ptr[j] = cpu_to_le32(data);
4306 dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4308 radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4309 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4313 /* clear state block */
4314 if (rdev->family >= CHIP_BONAIRE) {
4315 rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4316 } else if (rdev->family >= CHIP_TAHITI) {
4317 rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4318 dws = rdev->rlc.clear_state_size + (256 / 4);
4322 for (i = 0; cs_data[i].section != NULL; i++) {
4323 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4325 dws += cs_data[i].section[j].reg_count;
4328 reg_list_blk_index = (3 * reg_list_num + 2);
4329 dws += reg_list_blk_index;
4330 rdev->rlc.clear_state_size = dws;
4333 if (rdev->rlc.clear_state_obj == NULL) {
4334 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4335 RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4336 &rdev->rlc.clear_state_obj);
4338 dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4339 sumo_rlc_fini(rdev);
4343 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4344 if (unlikely(r != 0)) {
4345 sumo_rlc_fini(rdev);
4348 r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4349 &rdev->rlc.clear_state_gpu_addr);
4351 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4352 dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4353 sumo_rlc_fini(rdev);
4357 r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void**)&vptr);
4359 dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4360 sumo_rlc_fini(rdev);
4363 rdev->rlc.cs_ptr = vptr;
4364 /* set up the cs buffer */
4365 dst_ptr = rdev->rlc.cs_ptr;
4366 if (rdev->family >= CHIP_BONAIRE) {
4367 cik_get_csb_buffer(rdev, dst_ptr);
4368 } else if (rdev->family >= CHIP_TAHITI) {
4369 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4370 dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4371 dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4372 dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4373 si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4375 reg_list_hdr_blk_index = 0;
4376 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4377 data = upper_32_bits(reg_list_mc_addr);
4378 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4379 reg_list_hdr_blk_index++;
4380 for (i = 0; cs_data[i].section != NULL; i++) {
4381 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4382 reg_num = cs_data[i].section[j].reg_count;
4383 data = reg_list_mc_addr & 0xffffffff;
4384 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4385 reg_list_hdr_blk_index++;
4387 data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4388 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4389 reg_list_hdr_blk_index++;
4391 data = 0x08000000 | (reg_num * 4);
4392 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4393 reg_list_hdr_blk_index++;
4395 for (k = 0; k < reg_num; k++) {
4396 data = cs_data[i].section[j].extent[k];
4397 dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4399 reg_list_mc_addr += reg_num * 4;
4400 reg_list_blk_index += reg_num;
4403 dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4405 radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4406 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4409 if (rdev->rlc.cp_table_size) {
4410 if (rdev->rlc.cp_table_obj == NULL) {
4411 r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
4413 RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4414 &rdev->rlc.cp_table_obj);
4416 dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4417 sumo_rlc_fini(rdev);
4422 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4423 if (unlikely(r != 0)) {
4424 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4425 sumo_rlc_fini(rdev);
4428 r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4429 &rdev->rlc.cp_table_gpu_addr);
4431 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4432 dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4433 sumo_rlc_fini(rdev);
4436 r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void**)&vptr);
4438 dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4439 sumo_rlc_fini(rdev);
4442 rdev->rlc.cp_table_ptr = vptr;
4444 cik_init_cp_pg_table(rdev);
4446 radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4447 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
/*
 * evergreen_rlc_start - enable the RLC (RunList Controller)
 * @rdev: radeon device
 *
 * Writes RLC_CNTL with the RLC enable bit. On IGP (fusion) parts the
 * GFX power-gating enable/source bits are OR'd in as well, since the
 * RLC drives graphics power gating there.
 */
4454 static void evergreen_rlc_start(struct radeon_device *rdev)
4456 u32 mask = RLC_ENABLE;
/* IGP parts also gate graphics power through the RLC */
4458 if (rdev->flags & RADEON_IS_IGP) {
4459 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4462 WREG32(RLC_CNTL, mask);
/*
 * evergreen_rlc_resume - halt the RLC, load its microcode and restart it
 * @rdev: radeon device
 *
 * Stops the RLC, programs the IGP/Aruba load-balancing and
 * save/restore base registers where applicable, uploads the RLC ucode
 * word-by-word (big-endian firmware image converted to CPU order),
 * then re-enables the RLC via evergreen_rlc_start().
 */
4465 int evergreen_rlc_resume(struct radeon_device *rdev)
4468 const __be32 *fw_data;
/* quiesce the RLC before reprogramming it */
4473 r600_rlc_stop(rdev);
4475 WREG32(RLC_HB_CNTL, 0);
4477 if (rdev->flags & RADEON_IS_IGP) {
4478 if (rdev->family == CHIP_ARUBA) {
/* two always-on SIMDs per shader engine (bits 0-1 of each 16-bit half) */
4479 u32 always_on_bitmap =
4480 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4481 /* find out the number of active simds */
4482 u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4483 tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4484 tmp = hweight32(~tmp);
/* enable RLC load balancing only when all SIMDs are active */
4485 if (tmp == rdev->config.cayman.max_simds_per_se) {
4486 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4487 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4488 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4489 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4490 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4493 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4494 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
/* point the RLC at the save/restore and clear-state buffers (256B aligned) */
4496 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4497 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4499 WREG32(RLC_HB_BASE, 0);
4500 WREG32(RLC_HB_RPTR, 0);
4501 WREG32(RLC_HB_WPTR, 0);
4502 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4503 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4505 WREG32(RLC_MC_CNTL, 0);
4506 WREG32(RLC_UCODE_CNTL, 0);
/* upload the RLC microcode; size depends on the ASIC generation */
4508 fw_data = (const __be32 *)rdev->rlc_fw->data;
4509 if (rdev->family >= CHIP_ARUBA) {
4510 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4511 WREG32(RLC_UCODE_ADDR, i);
4512 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4514 } else if (rdev->family >= CHIP_CAYMAN) {
4515 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4516 WREG32(RLC_UCODE_ADDR, i);
4517 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4520 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4521 WREG32(RLC_UCODE_ADDR, i);
4522 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
/* reset the ucode address before starting the RLC */
4525 WREG32(RLC_UCODE_ADDR, 0);
4527 evergreen_rlc_start(rdev);
/*
 * evergreen_get_vblank_counter - fetch the hardware frame counter
 * @rdev: radeon device
 * @crtc: crtc index to query
 *
 * Returns the frame count register for the given crtc; out-of-range
 * crtc indices are rejected by the guard below.
 */
4534 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4536 if (crtc >= rdev->num_crtc)
4539 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
/*
 * evergreen_disable_interrupt_state - force all interrupt sources off
 * @rdev: radeon device
 *
 * Clears the CP/DMA ring interrupt enables, crtc vblank/vline masks,
 * pageflip masks, DAC autodetect and HPD interrupt enables.  The HPD
 * writes preserve only the programmed polarity bit so hotplug sense
 * direction survives the disable.
 */
4542 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
/* Cayman and newer have three CP rings and a second DMA engine */
4546 if (rdev->family >= CHIP_CAYMAN) {
4547 cayman_cp_int_cntl_setup(rdev, 0,
4548 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4549 cayman_cp_int_cntl_setup(rdev, 1, 0);
4550 cayman_cp_int_cntl_setup(rdev, 2, 0);
4551 tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4552 WREG32(CAYMAN_DMA1_CNTL, tmp);
4554 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4555 tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4556 WREG32(DMA_CNTL, tmp);
4557 WREG32(GRBM_INT_CNTL, 0);
4558 WREG32(SRBM_INT_CNTL, 0);
/* mask vblank/vline interrupts on every populated crtc */
4559 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4560 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4561 if (rdev->num_crtc >= 4) {
4562 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4563 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4565 if (rdev->num_crtc >= 6) {
4566 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4567 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
/* mask pageflip interrupts */
4570 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4571 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4572 if (rdev->num_crtc >= 4) {
4573 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4574 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4576 if (rdev->num_crtc >= 6) {
4577 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4578 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4581 /* only one DAC on DCE5 */
4582 if (!ASIC_IS_DCE5(rdev))
4583 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4584 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
/* keep only the polarity bit so sense direction is preserved */
4586 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4587 WREG32(DC_HPD1_INT_CONTROL, tmp);
4588 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4589 WREG32(DC_HPD2_INT_CONTROL, tmp);
4590 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4591 WREG32(DC_HPD3_INT_CONTROL, tmp);
4592 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4593 WREG32(DC_HPD4_INT_CONTROL, tmp);
4594 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4595 WREG32(DC_HPD5_INT_CONTROL, tmp);
4596 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4597 WREG32(DC_HPD6_INT_CONTROL, tmp);
/*
 * evergreen_irq_set - program interrupt enables from driver irq state
 * @rdev: radeon device
 *
 * Builds CP ring, DMA, thermal, vblank, hotplug (HPD) and HDMI audio
 * (AFMT) enable masks from rdev->irq and writes them to the hardware.
 * Bails out when no IRQ handler is installed; when the IH ring is
 * disabled it forces all interrupt state off instead.  The final
 * RREG32(SRBM_STATUS) posts the preceding writes.
 */
4603 int evergreen_irq_set(struct radeon_device *rdev)
4604 u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4605 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4606 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4607 u32 grbm_int_cntl = 0;
4608 u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4609 u32 dma_cntl, dma_cntl1 = 0;
4610 u32 thermal_int = 0;
4612 if (!rdev->irq.installed) {
4613 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4616 /* don't enable anything if the ih is disabled */
4617 if (!rdev->ih.enabled) {
4618 r600_disable_interrupts(rdev);
4619 /* force the active interrupt state to all disabled */
4620 evergreen_disable_interrupt_state(rdev);
/* start from current HPD control with enable bits cleared */
4624 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4625 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4626 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4627 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4628 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4629 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
/* Aruba keeps the thermal interrupt in a different register */
4630 if (rdev->family == CHIP_ARUBA)
4631 thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4632 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4634 thermal_int = RREG32(CG_THERMAL_INT) &
4635 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4637 afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4638 afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4639 afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4640 afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4641 afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4642 afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4644 dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4646 if (rdev->family >= CHIP_CAYMAN) {
4647 /* enable CP interrupts on all rings */
4648 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4649 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4650 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4652 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4653 DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4654 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4656 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4657 DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4658 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4661 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4662 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4663 cp_int_cntl |= RB_INT_ENABLE;
4664 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4668 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4669 DRM_DEBUG("r600_irq_set: sw int dma\n");
4670 dma_cntl |= TRAP_ENABLE;
/* second DMA engine exists on Cayman+ only */
4673 if (rdev->family >= CHIP_CAYMAN) {
4674 dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4675 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4676 DRM_DEBUG("r600_irq_set: sw int dma1\n");
4677 dma_cntl1 |= TRAP_ENABLE;
4681 if (rdev->irq.dpm_thermal) {
4682 DRM_DEBUG("dpm thermal\n");
4683 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
/* vblank enables: either a vblank waiter or a pending pageflip */
4686 if (rdev->irq.crtc_vblank_int[0] ||
4687 atomic_read(&rdev->irq.pflip[0])) {
4688 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4689 crtc1 |= VBLANK_INT_MASK;
4691 if (rdev->irq.crtc_vblank_int[1] ||
4692 atomic_read(&rdev->irq.pflip[1])) {
4693 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4694 crtc2 |= VBLANK_INT_MASK;
4696 if (rdev->irq.crtc_vblank_int[2] ||
4697 atomic_read(&rdev->irq.pflip[2])) {
4698 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4699 crtc3 |= VBLANK_INT_MASK;
4701 if (rdev->irq.crtc_vblank_int[3] ||
4702 atomic_read(&rdev->irq.pflip[3])) {
4703 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4704 crtc4 |= VBLANK_INT_MASK;
4706 if (rdev->irq.crtc_vblank_int[4] ||
4707 atomic_read(&rdev->irq.pflip[4])) {
4708 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4709 crtc5 |= VBLANK_INT_MASK;
4711 if (rdev->irq.crtc_vblank_int[5] ||
4712 atomic_read(&rdev->irq.pflip[5])) {
4713 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4714 crtc6 |= VBLANK_INT_MASK;
/* hotplug detect enables (connect + RX/short-pulse) */
4716 if (rdev->irq.hpd[0]) {
4717 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4718 hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4720 if (rdev->irq.hpd[1]) {
4721 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4722 hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4724 if (rdev->irq.hpd[2]) {
4725 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4726 hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4728 if (rdev->irq.hpd[3]) {
4729 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4730 hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4732 if (rdev->irq.hpd[4]) {
4733 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4734 hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4736 if (rdev->irq.hpd[5]) {
4737 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4738 hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
/* HDMI audio format-change (write trigger) enables */
4740 if (rdev->irq.afmt[0]) {
4741 DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4742 afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4744 if (rdev->irq.afmt[1]) {
4745 DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4746 afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4748 if (rdev->irq.afmt[2]) {
4749 DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4750 afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4752 if (rdev->irq.afmt[3]) {
4753 DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4754 afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4756 if (rdev->irq.afmt[4]) {
4757 DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4758 afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4760 if (rdev->irq.afmt[5]) {
4761 DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4762 afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
/* commit the computed masks to the hardware */
4765 if (rdev->family >= CHIP_CAYMAN) {
4766 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4767 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4768 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4770 WREG32(CP_INT_CNTL, cp_int_cntl);
4772 WREG32(DMA_CNTL, dma_cntl);
4774 if (rdev->family >= CHIP_CAYMAN)
4775 WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4777 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4779 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4780 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4781 if (rdev->num_crtc >= 4) {
4782 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4783 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4785 if (rdev->num_crtc >= 6) {
4786 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4787 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
/* pageflip interrupts are always enabled */
4790 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4791 GRPH_PFLIP_INT_MASK);
4792 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4793 GRPH_PFLIP_INT_MASK);
4794 if (rdev->num_crtc >= 4) {
4795 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4796 GRPH_PFLIP_INT_MASK);
4797 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4798 GRPH_PFLIP_INT_MASK);
4800 if (rdev->num_crtc >= 6) {
4801 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4802 GRPH_PFLIP_INT_MASK);
4803 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4804 GRPH_PFLIP_INT_MASK);
4807 WREG32(DC_HPD1_INT_CONTROL, hpd1);
4808 WREG32(DC_HPD2_INT_CONTROL, hpd2);
4809 WREG32(DC_HPD3_INT_CONTROL, hpd3);
4810 WREG32(DC_HPD4_INT_CONTROL, hpd4);
4811 WREG32(DC_HPD5_INT_CONTROL, hpd5);
4812 WREG32(DC_HPD6_INT_CONTROL, hpd6);
4813 if (rdev->family == CHIP_ARUBA)
4814 WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4816 WREG32(CG_THERMAL_INT, thermal_int);
4818 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4819 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4820 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4821 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4822 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4823 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
/* posting read to flush the register writes */
4826 RREG32(SRBM_STATUS);
/*
 * evergreen_irq_ack - latch and acknowledge display interrupt status
 * @rdev: radeon device
 *
 * Snapshots the DISP_INTERRUPT_STATUS*, GRPH_INT_STATUS and AFMT_STATUS
 * registers into rdev->irq.stat_regs.evergreen, then acknowledges every
 * asserted pageflip, vblank/vline, HPD, HPD-RX and HDMI audio interrupt
 * by writing the corresponding ack/clear bit back to the hardware.
 *
 * Fix: the HPD6 and HPD6-RX ack paths read DC_HPD5_INT_CONTROL while
 * writing the ack into DC_HPD6_INT_CONTROL (copy-paste from the HPD5
 * branch).  That read-modify-writes HPD6's control register from HPD5's
 * contents, clobbering HPD6's enable/polarity state.  Both reads now
 * target DC_HPD6_INT_CONTROL, matching the upstream kernel fix.
 */
4831 static void evergreen_irq_ack(struct radeon_device *rdev)
/* snapshot all display interrupt status registers */
4835 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4836 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4837 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4838 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4839 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4840 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4841 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4842 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4843 if (rdev->num_crtc >= 4) {
4844 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4845 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4847 if (rdev->num_crtc >= 6) {
4848 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4849 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4852 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4853 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4854 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4855 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4856 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4857 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
/* ack pageflip and vblank/vline interrupts for crtc 0/1 */
4859 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4860 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4861 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4862 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4863 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4864 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4865 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4866 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4867 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4868 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4869 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4870 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4872 if (rdev->num_crtc >= 4) {
4873 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4874 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4875 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4876 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4877 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4878 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4879 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4880 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4881 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4882 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4883 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4884 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4887 if (rdev->num_crtc >= 6) {
4888 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4889 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4890 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4891 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4892 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4893 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4894 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4895 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4896 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4897 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4898 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4899 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
/* ack hotplug (connect/disconnect) interrupts */
4902 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4903 tmp = RREG32(DC_HPD1_INT_CONTROL);
4904 tmp |= DC_HPDx_INT_ACK;
4905 WREG32(DC_HPD1_INT_CONTROL, tmp);
4907 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4908 tmp = RREG32(DC_HPD2_INT_CONTROL);
4909 tmp |= DC_HPDx_INT_ACK;
4910 WREG32(DC_HPD2_INT_CONTROL, tmp);
4912 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4913 tmp = RREG32(DC_HPD3_INT_CONTROL);
4914 tmp |= DC_HPDx_INT_ACK;
4915 WREG32(DC_HPD3_INT_CONTROL, tmp);
4917 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4918 tmp = RREG32(DC_HPD4_INT_CONTROL);
4919 tmp |= DC_HPDx_INT_ACK;
4920 WREG32(DC_HPD4_INT_CONTROL, tmp);
4922 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4923 tmp = RREG32(DC_HPD5_INT_CONTROL);
4924 tmp |= DC_HPDx_INT_ACK;
4925 WREG32(DC_HPD5_INT_CONTROL, tmp);
4927 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
/* was DC_HPD5_INT_CONTROL: must read the register we ack */
4928 tmp = RREG32(DC_HPD6_INT_CONTROL);
4929 tmp |= DC_HPDx_INT_ACK;
4930 WREG32(DC_HPD6_INT_CONTROL, tmp);
/* ack HPD RX (short-pulse) interrupts */
4933 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4934 tmp = RREG32(DC_HPD1_INT_CONTROL);
4935 tmp |= DC_HPDx_RX_INT_ACK;
4936 WREG32(DC_HPD1_INT_CONTROL, tmp);
4938 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4939 tmp = RREG32(DC_HPD2_INT_CONTROL);
4940 tmp |= DC_HPDx_RX_INT_ACK;
4941 WREG32(DC_HPD2_INT_CONTROL, tmp);
4943 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4944 tmp = RREG32(DC_HPD3_INT_CONTROL);
4945 tmp |= DC_HPDx_RX_INT_ACK;
4946 WREG32(DC_HPD3_INT_CONTROL, tmp);
4948 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4949 tmp = RREG32(DC_HPD4_INT_CONTROL);
4950 tmp |= DC_HPDx_RX_INT_ACK;
4951 WREG32(DC_HPD4_INT_CONTROL, tmp);
4953 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4954 tmp = RREG32(DC_HPD5_INT_CONTROL);
4955 tmp |= DC_HPDx_RX_INT_ACK;
4956 WREG32(DC_HPD5_INT_CONTROL, tmp);
4958 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
/* was DC_HPD5_INT_CONTROL: must read the register we ack */
4959 tmp = RREG32(DC_HPD6_INT_CONTROL);
4960 tmp |= DC_HPDx_RX_INT_ACK;
4961 WREG32(DC_HPD6_INT_CONTROL, tmp);
/* ack HDMI audio format-change (write trigger) interrupts */
4964 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4965 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4966 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4967 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4969 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4970 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4971 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4972 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4974 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4975 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4976 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4977 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4979 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4980 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4981 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4982 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4984 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4985 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4986 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4987 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4989 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4990 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4991 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4992 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
/*
 * evergreen_irq_disable - disable interrupts and clear pending state
 * @rdev: radeon device
 *
 * Disables the IH, acknowledges anything still asserted, then forces
 * every source's enable state off via evergreen_disable_interrupt_state().
 */
4996 static void evergreen_irq_disable(struct radeon_device *rdev)
4998 r600_disable_interrupts(rdev);
4999 /* Wait and acknowledge irq */
5001 evergreen_irq_ack(rdev);
5002 evergreen_disable_interrupt_state(rdev);
/*
 * evergreen_irq_suspend - quiesce interrupts for suspend
 * @rdev: radeon device
 *
 * Disables/acks all interrupts, then stops the RLC.
 */
5005 void evergreen_irq_suspend(struct radeon_device *rdev)
5007 evergreen_irq_disable(rdev);
5008 r600_rlc_stop(rdev);
/*
 * evergreen_get_ih_wptr - fetch the IH ring write pointer
 * @rdev: radeon device
 *
 * Reads the write pointer from the writeback page when writeback is
 * enabled, else from the IH_RB_WPTR register.  On ring overflow the
 * read pointer is advanced past the clobbered entries and the overflow
 * flag is cleared.  Returns the wptr masked to the ring size.
 */
5011 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
5015 if (rdev->wb.enabled)
5016 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
5018 wptr = RREG32(IH_RB_WPTR);
5020 if (wptr & RB_OVERFLOW) {
/* strip the overflow flag before using wptr as an offset */
5021 wptr &= ~RB_OVERFLOW;
5022 /* When a ring buffer overflow happen start parsing interrupt
5023 * from the last not overwritten vector (wptr + 16). Hopefully
5024 * this should allow us to catchup.
5026 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
5027 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
5028 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
5029 tmp = RREG32(IH_RB_CNTL);
5030 tmp |= IH_WPTR_OVERFLOW_CLEAR;
5031 WREG32(IH_RB_CNTL, tmp);
5033 return (wptr & rdev->ih.ptr_mask);
5036 irqreturn_t evergreen_irq_process(struct radeon_device *rdev)
5040 u32 src_id, src_data;
5042 bool queue_hotplug = false;
5043 bool queue_hdmi = false;
5044 bool queue_dp = false;
5045 bool queue_thermal = false;
5048 if (!rdev->ih.enabled || rdev->shutdown)
5051 wptr = evergreen_get_ih_wptr(rdev);
5054 /* is somebody else already processing irqs? */
5055 if (atomic_xchg(&rdev->ih.lock, 1))
5058 rptr = rdev->ih.rptr;
5059 DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5061 /* Order reading of wptr vs. reading of IH ring data */
5064 /* display interrupts */
5065 evergreen_irq_ack(rdev);
5067 while (rptr != wptr) {
5068 /* wptr/rptr are in bytes! */
5069 ring_index = rptr / 4;
5070 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5071 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5074 case 1: /* D1 vblank/vline */
5076 case 0: /* D1 vblank */
5077 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5078 DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5080 if (rdev->irq.crtc_vblank_int[0]) {
5081 drm_handle_vblank(rdev->ddev, 0);
5082 rdev->pm.vblank_sync = true;
5083 wake_up(&rdev->irq.vblank_queue);
5085 if (atomic_read(&rdev->irq.pflip[0]))
5086 radeon_crtc_handle_vblank(rdev, 0);
5087 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5088 DRM_DEBUG("IH: D1 vblank\n");
5091 case 1: /* D1 vline */
5092 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5093 DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5095 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5096 DRM_DEBUG("IH: D1 vline\n");
5100 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5104 case 2: /* D2 vblank/vline */
5106 case 0: /* D2 vblank */
5107 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5108 DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5110 if (rdev->irq.crtc_vblank_int[1]) {
5111 drm_handle_vblank(rdev->ddev, 1);
5112 rdev->pm.vblank_sync = true;
5113 wake_up(&rdev->irq.vblank_queue);
5115 if (atomic_read(&rdev->irq.pflip[1]))
5116 radeon_crtc_handle_vblank(rdev, 1);
5117 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5118 DRM_DEBUG("IH: D2 vblank\n");
5121 case 1: /* D2 vline */
5122 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5123 DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5125 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5126 DRM_DEBUG("IH: D2 vline\n");
5130 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5134 case 3: /* D3 vblank/vline */
5136 case 0: /* D3 vblank */
5137 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5138 DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5140 if (rdev->irq.crtc_vblank_int[2]) {
5141 drm_handle_vblank(rdev->ddev, 2);
5142 rdev->pm.vblank_sync = true;
5143 wake_up(&rdev->irq.vblank_queue);
5145 if (atomic_read(&rdev->irq.pflip[2]))
5146 radeon_crtc_handle_vblank(rdev, 2);
5147 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5148 DRM_DEBUG("IH: D3 vblank\n");
5151 case 1: /* D3 vline */
5152 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5153 DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5155 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5156 DRM_DEBUG("IH: D3 vline\n");
5160 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5164 case 4: /* D4 vblank/vline */
5166 case 0: /* D4 vblank */
5167 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5168 DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5170 if (rdev->irq.crtc_vblank_int[3]) {
5171 drm_handle_vblank(rdev->ddev, 3);
5172 rdev->pm.vblank_sync = true;
5173 wake_up(&rdev->irq.vblank_queue);
5175 if (atomic_read(&rdev->irq.pflip[3]))
5176 radeon_crtc_handle_vblank(rdev, 3);
5177 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5178 DRM_DEBUG("IH: D4 vblank\n");
5181 case 1: /* D4 vline */
5182 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5183 DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5185 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5186 DRM_DEBUG("IH: D4 vline\n");
5190 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5194 case 5: /* D5 vblank/vline */
5196 case 0: /* D5 vblank */
5197 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5198 DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5200 if (rdev->irq.crtc_vblank_int[4]) {
5201 drm_handle_vblank(rdev->ddev, 4);
5202 rdev->pm.vblank_sync = true;
5203 wake_up(&rdev->irq.vblank_queue);
5205 if (atomic_read(&rdev->irq.pflip[4]))
5206 radeon_crtc_handle_vblank(rdev, 4);
5207 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5208 DRM_DEBUG("IH: D5 vblank\n");
5211 case 1: /* D5 vline */
5212 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5213 DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5215 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5216 DRM_DEBUG("IH: D5 vline\n");
5220 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5224 case 6: /* D6 vblank/vline */
5226 case 0: /* D6 vblank */
5227 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5228 DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5230 if (rdev->irq.crtc_vblank_int[5]) {
5231 drm_handle_vblank(rdev->ddev, 5);
5232 rdev->pm.vblank_sync = true;
5233 wake_up(&rdev->irq.vblank_queue);
5235 if (atomic_read(&rdev->irq.pflip[5]))
5236 radeon_crtc_handle_vblank(rdev, 5);
5237 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5238 DRM_DEBUG("IH: D6 vblank\n");
5241 case 1: /* D6 vline */
5242 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5243 DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5245 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5246 DRM_DEBUG("IH: D6 vline\n");
5250 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5254 case 8: /* D1 page flip */
5255 case 10: /* D2 page flip */
5256 case 12: /* D3 page flip */
5257 case 14: /* D4 page flip */
5258 case 16: /* D5 page flip */
5259 case 18: /* D6 page flip */
5260 DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5261 if (radeon_use_pflipirq > 0)
5262 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5264 case 42: /* HPD hotplug */
5267 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5268 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5270 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5271 queue_hotplug = true;
5272 DRM_DEBUG("IH: HPD1\n");
5275 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5276 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5278 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5279 queue_hotplug = true;
5280 DRM_DEBUG("IH: HPD2\n");
5283 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5284 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5286 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5287 queue_hotplug = true;
5288 DRM_DEBUG("IH: HPD3\n");
5291 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5292 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5294 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5295 queue_hotplug = true;
5296 DRM_DEBUG("IH: HPD4\n");
5299 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5300 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5302 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5303 queue_hotplug = true;
5304 DRM_DEBUG("IH: HPD5\n");
5307 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5308 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5310 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5311 queue_hotplug = true;
5312 DRM_DEBUG("IH: HPD6\n");
5315 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5316 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5318 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5320 DRM_DEBUG("IH: HPD_RX 1\n");
5323 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5324 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5326 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5328 DRM_DEBUG("IH: HPD_RX 2\n");
5331 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5332 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5334 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5336 DRM_DEBUG("IH: HPD_RX 3\n");
5339 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5340 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5342 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5344 DRM_DEBUG("IH: HPD_RX 4\n");
5347 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5348 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5350 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5352 DRM_DEBUG("IH: HPD_RX 5\n");
5355 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5356 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5358 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5360 DRM_DEBUG("IH: HPD_RX 6\n");
5363 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5370 if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5371 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5373 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5375 DRM_DEBUG("IH: HDMI0\n");
5378 if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5379 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5381 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5383 DRM_DEBUG("IH: HDMI1\n");
5386 if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5387 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5389 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5391 DRM_DEBUG("IH: HDMI2\n");
5394 if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5395 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5397 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5399 DRM_DEBUG("IH: HDMI3\n");
5402 if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5403 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5405 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5407 DRM_DEBUG("IH: HDMI4\n");
5410 if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5411 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5413 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5415 DRM_DEBUG("IH: HDMI5\n");
5418 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5422 DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5423 WREG32(SRBM_INT_ACK, 0x1);
5426 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5427 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5431 addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5432 status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5433 /* reset addr and status */
5434 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5435 if (addr == 0x0 && status == 0x0)
5437 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5438 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
5440 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5442 cayman_vm_decode_fault(rdev, status, addr);
5444 case 176: /* CP_INT in ring buffer */
5445 case 177: /* CP_INT in IB1 */
5446 case 178: /* CP_INT in IB2 */
5447 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5448 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5450 case 181: /* CP EOP event */
5451 DRM_DEBUG("IH: CP EOP\n");
5452 if (rdev->family >= CHIP_CAYMAN) {
5455 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5458 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5461 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5465 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5467 case 224: /* DMA trap event */
5468 DRM_DEBUG("IH: DMA trap\n");
5469 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5471 case 230: /* thermal low to high */
5472 DRM_DEBUG("IH: thermal low to high\n");
5473 rdev->pm.dpm.thermal.high_to_low = false;
5474 queue_thermal = true;
5476 case 231: /* thermal high to low */
5477 DRM_DEBUG("IH: thermal high to low\n");
5478 rdev->pm.dpm.thermal.high_to_low = true;
5479 queue_thermal = true;
5481 case 233: /* GUI IDLE */
5482 DRM_DEBUG("IH: GUI idle\n");
5484 case 244: /* DMA trap event */
5485 if (rdev->family >= CHIP_CAYMAN) {
5486 DRM_DEBUG("IH: DMA1 trap\n");
5487 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5491 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5495 /* wptr/rptr are in bytes! */
5497 rptr &= rdev->ih.ptr_mask;
5498 WREG32(IH_RB_RPTR, rptr);
5501 schedule_work(&rdev->dp_work);
5503 taskqueue_enqueue(rdev->tq, &rdev->hotplug_work);
5505 taskqueue_enqueue(rdev->tq, &rdev->audio_work);
5506 if (queue_thermal && rdev->pm.dpm_enabled)
5507 taskqueue_enqueue(rdev->tq, &rdev->pm.dpm.thermal.work);
5508 rdev->ih.rptr = rptr;
5509 atomic_set(&rdev->ih.lock, 0);
5511 /* make sure wptr hasn't changed while processing */
5512 wptr = evergreen_get_ih_wptr(rdev);
/*
 * evergreen_uvd_init - early init of the UVD (video decode) block.
 * On radeon_uvd_init() failure, UVD is disabled (see the comment kept
 * below) instead of failing driver init: the UVD ring is still set up
 * but later resume paths will bail out early because uvd.vcpu_bo is NULL.
 * NOTE(review): some lines of this function are elided in this extract;
 * the error/has_uvd guards are presumably among them — confirm upstream.
 */
5519 static void evergreen_uvd_init(struct radeon_device *rdev)
5526 	r = radeon_uvd_init(rdev);
5528 		dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
5530 		 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
5531 		 * to early fails uvd_v2_2_resume() and thus nothing happens
5532 		 * there. So it is pointless to try to go through that code
5533 		 * hence why we disable uvd here.
5538 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
/* 4096 dwords is the UVD ring size used throughout this driver family */
5539 	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
/*
 * evergreen_uvd_start - bring the UVD engine back up and start its
 * fence ring.  Errors are reported via dev_err() and — judging by the
 * final statement — handled by zeroing ring_size so that UVD is simply
 * skipped later (evergreen_uvd_resume checks ring_size != 0), rather
 * than propagated to the caller.
 * NOTE(review): the error-branch control flow lines are elided in this
 * extract; verify the exact goto/return structure upstream.
 */
5542 static void evergreen_uvd_start(struct radeon_device *rdev)
5549 	r = uvd_v2_2_resume(rdev);
5551 		dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
5554 	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
5556 		dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
/* disable UVD: a zero ring_size makes evergreen_uvd_resume() a no-op */
5562 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
/*
 * evergreen_uvd_resume - (re)initialize the UVD ring and run the v1.0
 * hardware init.  Silently does nothing when the device has no UVD or
 * when UVD was disabled earlier (ring_size == 0, set by
 * evergreen_uvd_start on failure).  Errors are logged, not propagated.
 */
5565 static void evergreen_uvd_resume(struct radeon_device *rdev)
5567 	struct radeon_ring *ring;
/* UVD disabled or absent: nothing to resume */
5570 	if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
5573 	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
/* rptr offset 0: UVD does not use a writeback rptr slot here */
5574 	r = radeon_ring_init(rdev, ring, ring->ring_size, 0, RADEON_CP_PACKET2);
5576 		dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
5579 	r = uvd_v1_0_init(rdev);
5581 		dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
/*
 * evergreen_startup - common hardware bring-up path, shared by first
 * init (evergreen_init) and resume (evergreen_resume).
 *
 * Order matters throughout: scratch before MC programming (see comment
 * below), MC microcode before GART, GART before gpu_init, writeback and
 * fence rings before IRQs, IRQs before ring init, and microcode before
 * CP/DMA resume.  Returns 0 on success or a negative errno from the
 * first fatal step (error-return lines are elided in this extract).
 */
5586 static int evergreen_startup(struct radeon_device *rdev)
5588 	struct radeon_ring *ring;
5591 	/* enable pcie gen2 link */
5592 	evergreen_pcie_gen2_enable(rdev);
/* program ASPM (link power management) before touching the MC */
5594 	evergreen_program_aspm(rdev);
5596 	/* scratch needs to be initialized before MC */
5597 	r = r600_vram_scratch_init(rdev);
5601 	evergreen_mc_program(rdev);
/* DCE5 (NI) parts need MC ucode loaded by the driver unless DPM did it */
5603 	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
5604 		r = ni_mc_load_microcode(rdev);
5606 			DRM_ERROR("Failed to load MC firmware!\n");
/* AGP vs PCIe GART setup are mutually exclusive */
5611 	if (rdev->flags & RADEON_IS_AGP) {
5612 		evergreen_agp_enable(rdev);
5614 		r = evergreen_pcie_gart_enable(rdev);
5618 	evergreen_gpu_init(rdev);
5620 	/* allocate rlc buffers */
/* IGP (fusion) parts use the sumo RLC save/restore list */
5621 	if (rdev->flags & RADEON_IS_IGP) {
5622 		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5623 		rdev->rlc.reg_list_size =
5624 			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
5625 		rdev->rlc.cs_data = evergreen_cs_data;
5626 		r = sumo_rlc_init(rdev);
5628 			DRM_ERROR("Failed to init rlc BOs!\n");
5633 	/* allocate wb buffer */
5634 	r = radeon_wb_init(rdev);
/* start fence processing for the GFX and DMA rings */
5638 	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5640 		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5644 	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5646 		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
/* UVD start is best-effort; failures only disable UVD */
5650 	evergreen_uvd_start(rdev);
/* Enable IRQ */
5653 	if (!rdev->irq.installed) {
5654 		r = radeon_irq_kms_init(rdev);
5659 	r = r600_irq_init(rdev);
5661 		DRM_ERROR("radeon: IH init failed (%d).\n", r);
5662 		radeon_irq_kms_fini(rdev);
5665 	evergreen_irq_set(rdev);
5667 	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5668 	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5673 	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5674 	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5675 			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5679 	r = evergreen_cp_load_microcode(rdev);
5682 	r = evergreen_cp_resume(rdev);
5685 	r = r600_dma_resume(rdev);
5689 	evergreen_uvd_resume(rdev);
5691 	r = radeon_ib_pool_init(rdev);
5693 		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5697 	r = radeon_audio_init(rdev);
5699 		DRM_ERROR("radeon: audio init failed\n");
/*
 * evergreen_resume - bring the GPU back after suspend.
 * Resets the ASIC, re-posts it via the ATOM BIOS init table, restores
 * golden registers, resumes power management (DPM method only), then
 * reruns the shared evergreen_startup() path.  accel_working is set
 * optimistically before startup and cleared again on failure.
 */
5706 int evergreen_resume(struct radeon_device *rdev)
5710 	/* reset the asic, the gfx blocks are often in a bad state
5711 	 * after the driver is unloaded or after a resume
5713 	if (radeon_asic_reset(rdev))
5714 		dev_warn(rdev->dev, "GPU reset failed !\n");
5715 	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5716 	 * posting will perform necessary task to bring back GPU into good
/* re-run the ATOM ASIC init table to post the card */
5720 	atom_asic_init(rdev->mode_info.atom_context);
5722 	/* init golden registers */
5723 	evergreen_init_golden_registers(rdev);
/* only the DPM path needs an explicit PM resume here */
5725 	if (rdev->pm.pm_method == PM_METHOD_DPM)
5726 		radeon_pm_resume(rdev);
5728 	rdev->accel_working = true;
5729 	r = evergreen_startup(rdev);
5731 		DRM_ERROR("evergreen startup failed on resume\n");
5732 		rdev->accel_working = false;
/*
 * evergreen_suspend - quiesce the GPU before system suspend.
 * Tear-down is roughly the reverse of startup: PM and audio first,
 * then UVD (only if present), DMA engine, IRQs, writeback, and finally
 * the PCIe GART.  NOTE(review): the r700_cp_stop()/CP-stop call that
 * usually sits in this sequence is among the elided lines — confirm.
 */
5740 int evergreen_suspend(struct radeon_device *rdev)
5742 	radeon_pm_suspend(rdev);
5743 	radeon_audio_fini(rdev);
/* UVD teardown only applies to parts that actually have the block */
5744 	if (rdev->has_uvd) {
5745 		uvd_v1_0_fini(rdev);
5746 		radeon_uvd_suspend(rdev);
5749 	r600_dma_stop(rdev);
5750 	evergreen_irq_suspend(rdev);
5751 	radeon_wb_disable(rdev);
5752 	evergreen_pcie_gart_disable(rdev);
5757 /* Plan is to move initialization in that function and use
5758  * helper function so that radeon_device_init pretty much
5759  * do nothing more than calling asic specific function. This
5760  * should also allow to remove a bunch of callback function
/*
 * evergreen_init - one-time software/hardware init for evergreen-class
 * GPUs: BIOS fetch and validation (must be ATOMBIOS), ASIC reset and
 * posting, clock/MC/memory-manager setup, firmware load, ring and IH
 * initialization, then the shared evergreen_startup() path.  On startup
 * failure, acceleration is torn down but init still "succeeds" so the
 * display side can keep working.  Error-return lines are elided in this
 * extract.
 */
5763 int evergreen_init(struct radeon_device *rdev)
5768 	if (!radeon_get_bios(rdev)) {
5769 		if (ASIC_IS_AVIVO(rdev))
5772 	/* Must be an ATOMBIOS */
5773 	if (!rdev->is_atom_bios) {
5774 		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
5777 	r = radeon_atombios_init(rdev);
5780 	/* reset the asic, the gfx blocks are often in a bad state
5781 	 * after the driver is unloaded or after a resume
5783 	if (radeon_asic_reset(rdev))
5784 		dev_warn(rdev->dev, "GPU reset failed !\n");
5785 	/* Post card if necessary */
5786 	if (!radeon_card_posted(rdev)) {
5788 			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
5791 		DRM_INFO("GPU not posted. posting now...\n");
5792 		atom_asic_init(rdev->mode_info.atom_context);
5794 	/* init golden registers */
5795 	evergreen_init_golden_registers(rdev);
5796 	/* Initialize scratch registers */
5797 	r600_scratch_init(rdev);
5798 	/* Initialize surface registers */
5799 	radeon_surface_init(rdev);
5800 	/* Initialize clocks */
5801 	radeon_get_clock_info(rdev->ddev);
5803 	r = radeon_fence_driver_init(rdev);
5806 	/* initialize AGP */
/* AGP init failure is non-fatal: fall back to disabling AGP */
5807 	if (rdev->flags & RADEON_IS_AGP) {
5808 		r = radeon_agp_init(rdev);
5810 			radeon_agp_disable(rdev);
5812 	/* initialize memory controller */
5813 	r = evergreen_mc_init(rdev);
5816 	/* Memory manager */
5817 	r = radeon_bo_init(rdev);
/* DCE5 (NI) parts additionally need MC firmware; older parts do not */
5821 	if (ASIC_IS_DCE5(rdev)) {
5822 		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
5823 			r = ni_init_microcode(rdev);
5825 				DRM_ERROR("Failed to load firmware!\n");
5830 		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
5831 			r = r600_init_microcode(rdev);
5833 				DRM_ERROR("Failed to load firmware!\n");
5839 	/* Initialize power management */
5840 	radeon_pm_init(rdev);
5842 	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
5843 	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
5845 	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
5846 	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
5848 	evergreen_uvd_init(rdev);
5850 	rdev->ih.ring_obj = NULL;
5851 	r600_ih_ring_init(rdev, 64 * 1024);
5853 	r = r600_pcie_gart_init(rdev);
5857 	rdev->accel_working = true;
5858 	r = evergreen_startup(rdev);
/* startup failed: tear down acceleration but keep the driver loaded */
5860 		dev_err(rdev->dev, "disabling GPU acceleration\n");
5862 		r600_dma_fini(rdev);
5863 		r600_irq_fini(rdev);
5864 		if (rdev->flags & RADEON_IS_IGP)
5865 			sumo_rlc_fini(rdev);
5866 		radeon_wb_fini(rdev);
5867 		radeon_ib_pool_fini(rdev);
5868 		radeon_irq_kms_fini(rdev);
5869 		evergreen_pcie_gart_fini(rdev);
5870 		rdev->accel_working = false;
5873 	/* Don't start up if the MC ucode is missing on BTC parts.
5874 	 * The default clocks and voltages before the MC ucode
5875 	 * is loaded are not suffient for advanced operations.
/* discrete DCE5 parts without MC ucode cannot run safely — hard error */
5877 	if (ASIC_IS_DCE5(rdev)) {
5878 		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
5879 			DRM_ERROR("radeon: MC ucode required for NI+.\n");
/*
 * evergreen_fini - full driver teardown, mirroring evergreen_init in
 * reverse order: PM/audio, engines and IRQs, RLC (IGP only), writeback,
 * IB pool, UVD, GART, scratch, GEM, fences, AGP, BO manager, atombios,
 * and finally the loaded microcode (NI variant on DCE5 parts).
 */
5887 void evergreen_fini(struct radeon_device *rdev)
5889 	radeon_pm_fini(rdev);
5890 	radeon_audio_fini(rdev);
5892 	r600_dma_fini(rdev);
5893 	r600_irq_fini(rdev);
/* RLC BOs were only allocated on IGP parts (see evergreen_startup) */
5894 	if (rdev->flags & RADEON_IS_IGP)
5895 		sumo_rlc_fini(rdev);
5896 	radeon_wb_fini(rdev);
5897 	radeon_ib_pool_fini(rdev);
5898 	radeon_irq_kms_fini(rdev);
5899 	uvd_v1_0_fini(rdev);
5900 	radeon_uvd_fini(rdev);
5901 	evergreen_pcie_gart_fini(rdev);
5902 	r600_vram_scratch_fini(rdev);
5903 	radeon_gem_fini(rdev);
5904 	radeon_fence_driver_fini(rdev);
5905 	radeon_agp_fini(rdev);
5906 	radeon_bo_fini(rdev);
5907 	radeon_atombios_fini(rdev);
/* microcode loader differs by display generation */
5908 	if (ASIC_IS_DCE5(rdev))
5909 		ni_fini_microcode(rdev);
5911 		r600_fini_microcode(rdev);
/*
 * evergreen_pcie_gen2_enable - switch the PCIe link to gen2 speeds when
 * both sides support it.  Bails out early when disabled by the
 * radeon.pcie_gen2 module parameter, on IGP or non-PCIe parts, on X2
 * (dual-GPU) boards, or when the root port caps report no 5.0/8.0 GT/s
 * support.  The register sequence (clear the failed-speed-change
 * counter, then set the GEN2 strap) follows the hardware programming
 * sequence; statement order matters, hence the repeated read-modify-
 * write of PCIE_LC_SPEED_CNTL.
 */
5916 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5918 	u32 link_width_cntl, speed_cntl, mask;
5921 	if (radeon_pcie_gen2 == 0)
5924 	if (rdev->flags & RADEON_IS_IGP)
5927 	if (!(rdev->flags & RADEON_IS_PCIE))
5930 	/* x2 cards have a special sequence */
5931 	if (ASIC_IS_X2(rdev))
5934 	ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
/* nothing to do unless the bridge supports 5.0 or 8.0 GT/s */
5938 	if (!(mask & (DRM_PCIE_SPEED_50 | DRM_PCIE_SPEED_80)))
5941 	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5942 	if (speed_cntl & LC_CURRENT_DATA_RATE) {
5943 		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5947 	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
/* only proceed if the other side has sent or supports gen2 */
5949 	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5950 	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5952 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5953 		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5954 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5956 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5957 		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5958 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* pulse the clear bit for the failed-speed-change counter: set... */
5960 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5961 		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5962 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* ...then clear it again */
5964 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5965 		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5966 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5968 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5969 		speed_cntl |= LC_GEN2_EN_STRAP;
5970 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* fallback branch (condition elided): leave link at gen1 */
5973 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5974 		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5976 			link_width_cntl |= LC_UPCONFIGURE_DIS;
5978 			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5979 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
/*
 * evergreen_program_aspm - configure PCIe Active State Power Management
 * (L0s/L1) and the related PHY PLL power-down behavior.  Skipped when
 * disabled via the radeon.aspm module parameter or on non-PCIe parts.
 * Family-dependent: CHIP_BARTS and newer (NI) use different inactivity
 * timeouts, PLL ramp-up times and LS2 exit times than evergreen proper.
 * NOTE(review): several guard conditions and the switch body around
 * line 6001 are elided in this extract; the disable_l0s/disable_l1
 * decisions cannot be fully traced here.
 */
5983 void evergreen_program_aspm(struct radeon_device *rdev)
5986 	u32 pcie_lc_cntl, pcie_lc_cntl_old;
5987 	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5988 	/* fusion_platform = true
5989 	 * if the system is a fusion system
5990 	 * (APU or DGPU in a fusion system).
5991 	 * todo: check if the system is a fusion platform.
5993 	bool fusion_platform = false;
5995 	if (radeon_aspm == 0)
5998 	if (!(rdev->flags & RADEON_IS_PCIE))
6001 	switch (rdev->family) {
6014 		disable_l0s = false;
6018 	if (rdev->flags & RADEON_IS_IGP)
6019 		fusion_platform = true; /* XXX also dGPUs in a fusion system */
/* PIF pairing differs between fusion (APU) and discrete platforms */
6021 	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
6022 	if (fusion_platform)
6027 		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
6029 	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
6030 	if (fusion_platform)
6035 		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
6037 	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
6038 	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
/* L0s inactivity timeout: NI uses 7, evergreen uses 3 */
6040 		if (rdev->family >= CHIP_BARTS)
6041 			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
6043 			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
/* L1 inactivity timeout: NI uses 7, evergreen uses 8 */
6047 		if (rdev->family >= CHIP_BARTS)
6048 			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
6050 			pcie_lc_cntl |= LC_L1_INACTIVITY(8);
/* allow the PHY PLLs to power down in L1 on both PIFs */
6052 		if (!disable_plloff_in_l1) {
6053 			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6054 			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6055 			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6057 				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6059 			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6060 			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6061 			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6063 				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6065 			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6066 			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6067 			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6069 				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6071 			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6072 			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6073 			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6075 				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
/* NI parts additionally program the PLL ramp-up times on both PIFs */
6077 			if (rdev->family >= CHIP_BARTS) {
6078 				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6079 				data &= ~PLL_RAMP_UP_TIME_0_MASK;
6080 				data |= PLL_RAMP_UP_TIME_0(4);
6082 					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6084 				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6085 				data &= ~PLL_RAMP_UP_TIME_1_MASK;
6086 				data |= PLL_RAMP_UP_TIME_1(4);
6088 					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6090 				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6091 				data &= ~PLL_RAMP_UP_TIME_0_MASK;
6092 				data |= PLL_RAMP_UP_TIME_0(4);
6094 					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6096 				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6097 				data &= ~PLL_RAMP_UP_TIME_1_MASK;
6098 				data |= PLL_RAMP_UP_TIME_1(4);
6100 					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6103 			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
6104 			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
6105 			data |= LC_DYN_LANES_PWR_STATE(3);
6107 				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
6109 			if (rdev->family >= CHIP_BARTS) {
6110 				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
6111 				data &= ~LS2_EXIT_TIME_MASK;
6112 				data |= LS2_EXIT_TIME(1);
6114 					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
6116 				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
6117 				data &= ~LS2_EXIT_TIME_MASK;
6118 				data |= LS2_EXIT_TIME(1);
6120 					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
6125 	/* evergreen parts only */
6126 	if (rdev->family < CHIP_BARTS)
6127 		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
/* write PCIE_LC_CNTL only when the computed value actually changed */
6129 	if (pcie_lc_cntl != pcie_lc_cntl_old)
6130 		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);