/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 *
 * $FreeBSD: head/sys/dev/drm2/radeon/evergreen.c 254885 2013-08-25 19:37:15Z dumbbell $
 */
#include <dev/drm2/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include <uapi_drm/radeon_drm.h>
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"
#include "evergreen_blit_shaders.h"
#include "radeon_ucode.h"
38 static const u32 crtc_offsets[6] =
40 EVERGREEN_CRTC0_REGISTER_OFFSET,
41 EVERGREEN_CRTC1_REGISTER_OFFSET,
42 EVERGREEN_CRTC2_REGISTER_OFFSET,
43 EVERGREEN_CRTC3_REGISTER_OFFSET,
44 EVERGREEN_CRTC4_REGISTER_OFFSET,
45 EVERGREEN_CRTC5_REGISTER_OFFSET
48 #include "clearstate_evergreen.h"
50 static u32 sumo_rlc_save_restore_register_list[] =
134 static u32 sumo_rlc_save_restore_register_list_size = ARRAY_SIZE(sumo_rlc_save_restore_register_list);
136 static void evergreen_gpu_init(struct radeon_device *rdev);
138 static const u32 evergreen_golden_registers[] =
140 0x3f90, 0xffff0000, 0xff000000,
141 0x9148, 0xffff0000, 0xff000000,
142 0x3f94, 0xffff0000, 0xff000000,
143 0x914c, 0xffff0000, 0xff000000,
144 0x9b7c, 0xffffffff, 0x00000000,
145 0x8a14, 0xffffffff, 0x00000007,
146 0x8b10, 0xffffffff, 0x00000000,
147 0x960c, 0xffffffff, 0x54763210,
148 0x88c4, 0xffffffff, 0x000000c2,
149 0x88d4, 0xffffffff, 0x00000010,
150 0x8974, 0xffffffff, 0x00000000,
151 0xc78, 0x00000080, 0x00000080,
152 0x5eb4, 0xffffffff, 0x00000002,
153 0x5e78, 0xffffffff, 0x001000f0,
154 0x6104, 0x01000300, 0x00000000,
155 0x5bc0, 0x00300000, 0x00000000,
156 0x7030, 0xffffffff, 0x00000011,
157 0x7c30, 0xffffffff, 0x00000011,
158 0x10830, 0xffffffff, 0x00000011,
159 0x11430, 0xffffffff, 0x00000011,
160 0x12030, 0xffffffff, 0x00000011,
161 0x12c30, 0xffffffff, 0x00000011,
162 0xd02c, 0xffffffff, 0x08421000,
163 0x240c, 0xffffffff, 0x00000380,
164 0x8b24, 0xffffffff, 0x00ff0fff,
165 0x28a4c, 0x06000000, 0x06000000,
166 0x10c, 0x00000001, 0x00000001,
167 0x8d00, 0xffffffff, 0x100e4848,
168 0x8d04, 0xffffffff, 0x00164745,
169 0x8c00, 0xffffffff, 0xe4000003,
170 0x8c04, 0xffffffff, 0x40600060,
171 0x8c08, 0xffffffff, 0x001c001c,
172 0x8cf0, 0xffffffff, 0x08e00620,
173 0x8c20, 0xffffffff, 0x00800080,
174 0x8c24, 0xffffffff, 0x00800080,
175 0x8c18, 0xffffffff, 0x20202078,
176 0x8c1c, 0xffffffff, 0x00001010,
177 0x28350, 0xffffffff, 0x00000000,
178 0xa008, 0xffffffff, 0x00010000,
179 0x5cc, 0xffffffff, 0x00000001,
180 0x9508, 0xffffffff, 0x00000002,
181 0x913c, 0x0000000f, 0x0000000a
184 static const u32 evergreen_golden_registers2[] =
186 0x2f4c, 0xffffffff, 0x00000000,
187 0x54f4, 0xffffffff, 0x00000000,
188 0x54f0, 0xffffffff, 0x00000000,
189 0x5498, 0xffffffff, 0x00000000,
190 0x549c, 0xffffffff, 0x00000000,
191 0x5494, 0xffffffff, 0x00000000,
192 0x53cc, 0xffffffff, 0x00000000,
193 0x53c8, 0xffffffff, 0x00000000,
194 0x53c4, 0xffffffff, 0x00000000,
195 0x53c0, 0xffffffff, 0x00000000,
196 0x53bc, 0xffffffff, 0x00000000,
197 0x53b8, 0xffffffff, 0x00000000,
198 0x53b4, 0xffffffff, 0x00000000,
199 0x53b0, 0xffffffff, 0x00000000
202 static const u32 cypress_mgcg_init[] =
204 0x802c, 0xffffffff, 0xc0000000,
205 0x5448, 0xffffffff, 0x00000100,
206 0x55e4, 0xffffffff, 0x00000100,
207 0x160c, 0xffffffff, 0x00000100,
208 0x5644, 0xffffffff, 0x00000100,
209 0xc164, 0xffffffff, 0x00000100,
210 0x8a18, 0xffffffff, 0x00000100,
211 0x897c, 0xffffffff, 0x06000100,
212 0x8b28, 0xffffffff, 0x00000100,
213 0x9144, 0xffffffff, 0x00000100,
214 0x9a60, 0xffffffff, 0x00000100,
215 0x9868, 0xffffffff, 0x00000100,
216 0x8d58, 0xffffffff, 0x00000100,
217 0x9510, 0xffffffff, 0x00000100,
218 0x949c, 0xffffffff, 0x00000100,
219 0x9654, 0xffffffff, 0x00000100,
220 0x9030, 0xffffffff, 0x00000100,
221 0x9034, 0xffffffff, 0x00000100,
222 0x9038, 0xffffffff, 0x00000100,
223 0x903c, 0xffffffff, 0x00000100,
224 0x9040, 0xffffffff, 0x00000100,
225 0xa200, 0xffffffff, 0x00000100,
226 0xa204, 0xffffffff, 0x00000100,
227 0xa208, 0xffffffff, 0x00000100,
228 0xa20c, 0xffffffff, 0x00000100,
229 0x971c, 0xffffffff, 0x00000100,
230 0x977c, 0xffffffff, 0x00000100,
231 0x3f80, 0xffffffff, 0x00000100,
232 0xa210, 0xffffffff, 0x00000100,
233 0xa214, 0xffffffff, 0x00000100,
234 0x4d8, 0xffffffff, 0x00000100,
235 0x9784, 0xffffffff, 0x00000100,
236 0x9698, 0xffffffff, 0x00000100,
237 0x4d4, 0xffffffff, 0x00000200,
238 0x30cc, 0xffffffff, 0x00000100,
239 0xd0c0, 0xffffffff, 0xff000100,
240 0x802c, 0xffffffff, 0x40000000,
241 0x915c, 0xffffffff, 0x00010000,
242 0x9160, 0xffffffff, 0x00030002,
243 0x9178, 0xffffffff, 0x00070000,
244 0x917c, 0xffffffff, 0x00030002,
245 0x9180, 0xffffffff, 0x00050004,
246 0x918c, 0xffffffff, 0x00010006,
247 0x9190, 0xffffffff, 0x00090008,
248 0x9194, 0xffffffff, 0x00070000,
249 0x9198, 0xffffffff, 0x00030002,
250 0x919c, 0xffffffff, 0x00050004,
251 0x91a8, 0xffffffff, 0x00010006,
252 0x91ac, 0xffffffff, 0x00090008,
253 0x91b0, 0xffffffff, 0x00070000,
254 0x91b4, 0xffffffff, 0x00030002,
255 0x91b8, 0xffffffff, 0x00050004,
256 0x91c4, 0xffffffff, 0x00010006,
257 0x91c8, 0xffffffff, 0x00090008,
258 0x91cc, 0xffffffff, 0x00070000,
259 0x91d0, 0xffffffff, 0x00030002,
260 0x91d4, 0xffffffff, 0x00050004,
261 0x91e0, 0xffffffff, 0x00010006,
262 0x91e4, 0xffffffff, 0x00090008,
263 0x91e8, 0xffffffff, 0x00000000,
264 0x91ec, 0xffffffff, 0x00070000,
265 0x91f0, 0xffffffff, 0x00030002,
266 0x91f4, 0xffffffff, 0x00050004,
267 0x9200, 0xffffffff, 0x00010006,
268 0x9204, 0xffffffff, 0x00090008,
269 0x9208, 0xffffffff, 0x00070000,
270 0x920c, 0xffffffff, 0x00030002,
271 0x9210, 0xffffffff, 0x00050004,
272 0x921c, 0xffffffff, 0x00010006,
273 0x9220, 0xffffffff, 0x00090008,
274 0x9224, 0xffffffff, 0x00070000,
275 0x9228, 0xffffffff, 0x00030002,
276 0x922c, 0xffffffff, 0x00050004,
277 0x9238, 0xffffffff, 0x00010006,
278 0x923c, 0xffffffff, 0x00090008,
279 0x9240, 0xffffffff, 0x00070000,
280 0x9244, 0xffffffff, 0x00030002,
281 0x9248, 0xffffffff, 0x00050004,
282 0x9254, 0xffffffff, 0x00010006,
283 0x9258, 0xffffffff, 0x00090008,
284 0x925c, 0xffffffff, 0x00070000,
285 0x9260, 0xffffffff, 0x00030002,
286 0x9264, 0xffffffff, 0x00050004,
287 0x9270, 0xffffffff, 0x00010006,
288 0x9274, 0xffffffff, 0x00090008,
289 0x9278, 0xffffffff, 0x00070000,
290 0x927c, 0xffffffff, 0x00030002,
291 0x9280, 0xffffffff, 0x00050004,
292 0x928c, 0xffffffff, 0x00010006,
293 0x9290, 0xffffffff, 0x00090008,
294 0x9294, 0xffffffff, 0x00000000,
295 0x929c, 0xffffffff, 0x00000001,
296 0x802c, 0xffffffff, 0x40010000,
297 0x915c, 0xffffffff, 0x00010000,
298 0x9160, 0xffffffff, 0x00030002,
299 0x9178, 0xffffffff, 0x00070000,
300 0x917c, 0xffffffff, 0x00030002,
301 0x9180, 0xffffffff, 0x00050004,
302 0x918c, 0xffffffff, 0x00010006,
303 0x9190, 0xffffffff, 0x00090008,
304 0x9194, 0xffffffff, 0x00070000,
305 0x9198, 0xffffffff, 0x00030002,
306 0x919c, 0xffffffff, 0x00050004,
307 0x91a8, 0xffffffff, 0x00010006,
308 0x91ac, 0xffffffff, 0x00090008,
309 0x91b0, 0xffffffff, 0x00070000,
310 0x91b4, 0xffffffff, 0x00030002,
311 0x91b8, 0xffffffff, 0x00050004,
312 0x91c4, 0xffffffff, 0x00010006,
313 0x91c8, 0xffffffff, 0x00090008,
314 0x91cc, 0xffffffff, 0x00070000,
315 0x91d0, 0xffffffff, 0x00030002,
316 0x91d4, 0xffffffff, 0x00050004,
317 0x91e0, 0xffffffff, 0x00010006,
318 0x91e4, 0xffffffff, 0x00090008,
319 0x91e8, 0xffffffff, 0x00000000,
320 0x91ec, 0xffffffff, 0x00070000,
321 0x91f0, 0xffffffff, 0x00030002,
322 0x91f4, 0xffffffff, 0x00050004,
323 0x9200, 0xffffffff, 0x00010006,
324 0x9204, 0xffffffff, 0x00090008,
325 0x9208, 0xffffffff, 0x00070000,
326 0x920c, 0xffffffff, 0x00030002,
327 0x9210, 0xffffffff, 0x00050004,
328 0x921c, 0xffffffff, 0x00010006,
329 0x9220, 0xffffffff, 0x00090008,
330 0x9224, 0xffffffff, 0x00070000,
331 0x9228, 0xffffffff, 0x00030002,
332 0x922c, 0xffffffff, 0x00050004,
333 0x9238, 0xffffffff, 0x00010006,
334 0x923c, 0xffffffff, 0x00090008,
335 0x9240, 0xffffffff, 0x00070000,
336 0x9244, 0xffffffff, 0x00030002,
337 0x9248, 0xffffffff, 0x00050004,
338 0x9254, 0xffffffff, 0x00010006,
339 0x9258, 0xffffffff, 0x00090008,
340 0x925c, 0xffffffff, 0x00070000,
341 0x9260, 0xffffffff, 0x00030002,
342 0x9264, 0xffffffff, 0x00050004,
343 0x9270, 0xffffffff, 0x00010006,
344 0x9274, 0xffffffff, 0x00090008,
345 0x9278, 0xffffffff, 0x00070000,
346 0x927c, 0xffffffff, 0x00030002,
347 0x9280, 0xffffffff, 0x00050004,
348 0x928c, 0xffffffff, 0x00010006,
349 0x9290, 0xffffffff, 0x00090008,
350 0x9294, 0xffffffff, 0x00000000,
351 0x929c, 0xffffffff, 0x00000001,
352 0x802c, 0xffffffff, 0xc0000000
355 static const u32 redwood_mgcg_init[] =
357 0x802c, 0xffffffff, 0xc0000000,
358 0x5448, 0xffffffff, 0x00000100,
359 0x55e4, 0xffffffff, 0x00000100,
360 0x160c, 0xffffffff, 0x00000100,
361 0x5644, 0xffffffff, 0x00000100,
362 0xc164, 0xffffffff, 0x00000100,
363 0x8a18, 0xffffffff, 0x00000100,
364 0x897c, 0xffffffff, 0x06000100,
365 0x8b28, 0xffffffff, 0x00000100,
366 0x9144, 0xffffffff, 0x00000100,
367 0x9a60, 0xffffffff, 0x00000100,
368 0x9868, 0xffffffff, 0x00000100,
369 0x8d58, 0xffffffff, 0x00000100,
370 0x9510, 0xffffffff, 0x00000100,
371 0x949c, 0xffffffff, 0x00000100,
372 0x9654, 0xffffffff, 0x00000100,
373 0x9030, 0xffffffff, 0x00000100,
374 0x9034, 0xffffffff, 0x00000100,
375 0x9038, 0xffffffff, 0x00000100,
376 0x903c, 0xffffffff, 0x00000100,
377 0x9040, 0xffffffff, 0x00000100,
378 0xa200, 0xffffffff, 0x00000100,
379 0xa204, 0xffffffff, 0x00000100,
380 0xa208, 0xffffffff, 0x00000100,
381 0xa20c, 0xffffffff, 0x00000100,
382 0x971c, 0xffffffff, 0x00000100,
383 0x977c, 0xffffffff, 0x00000100,
384 0x3f80, 0xffffffff, 0x00000100,
385 0xa210, 0xffffffff, 0x00000100,
386 0xa214, 0xffffffff, 0x00000100,
387 0x4d8, 0xffffffff, 0x00000100,
388 0x9784, 0xffffffff, 0x00000100,
389 0x9698, 0xffffffff, 0x00000100,
390 0x4d4, 0xffffffff, 0x00000200,
391 0x30cc, 0xffffffff, 0x00000100,
392 0xd0c0, 0xffffffff, 0xff000100,
393 0x802c, 0xffffffff, 0x40000000,
394 0x915c, 0xffffffff, 0x00010000,
395 0x9160, 0xffffffff, 0x00030002,
396 0x9178, 0xffffffff, 0x00070000,
397 0x917c, 0xffffffff, 0x00030002,
398 0x9180, 0xffffffff, 0x00050004,
399 0x918c, 0xffffffff, 0x00010006,
400 0x9190, 0xffffffff, 0x00090008,
401 0x9194, 0xffffffff, 0x00070000,
402 0x9198, 0xffffffff, 0x00030002,
403 0x919c, 0xffffffff, 0x00050004,
404 0x91a8, 0xffffffff, 0x00010006,
405 0x91ac, 0xffffffff, 0x00090008,
406 0x91b0, 0xffffffff, 0x00070000,
407 0x91b4, 0xffffffff, 0x00030002,
408 0x91b8, 0xffffffff, 0x00050004,
409 0x91c4, 0xffffffff, 0x00010006,
410 0x91c8, 0xffffffff, 0x00090008,
411 0x91cc, 0xffffffff, 0x00070000,
412 0x91d0, 0xffffffff, 0x00030002,
413 0x91d4, 0xffffffff, 0x00050004,
414 0x91e0, 0xffffffff, 0x00010006,
415 0x91e4, 0xffffffff, 0x00090008,
416 0x91e8, 0xffffffff, 0x00000000,
417 0x91ec, 0xffffffff, 0x00070000,
418 0x91f0, 0xffffffff, 0x00030002,
419 0x91f4, 0xffffffff, 0x00050004,
420 0x9200, 0xffffffff, 0x00010006,
421 0x9204, 0xffffffff, 0x00090008,
422 0x9294, 0xffffffff, 0x00000000,
423 0x929c, 0xffffffff, 0x00000001,
424 0x802c, 0xffffffff, 0xc0000000
427 static const u32 cedar_golden_registers[] =
429 0x3f90, 0xffff0000, 0xff000000,
430 0x9148, 0xffff0000, 0xff000000,
431 0x3f94, 0xffff0000, 0xff000000,
432 0x914c, 0xffff0000, 0xff000000,
433 0x9b7c, 0xffffffff, 0x00000000,
434 0x8a14, 0xffffffff, 0x00000007,
435 0x8b10, 0xffffffff, 0x00000000,
436 0x960c, 0xffffffff, 0x54763210,
437 0x88c4, 0xffffffff, 0x000000c2,
438 0x88d4, 0xffffffff, 0x00000000,
439 0x8974, 0xffffffff, 0x00000000,
440 0xc78, 0x00000080, 0x00000080,
441 0x5eb4, 0xffffffff, 0x00000002,
442 0x5e78, 0xffffffff, 0x001000f0,
443 0x6104, 0x01000300, 0x00000000,
444 0x5bc0, 0x00300000, 0x00000000,
445 0x7030, 0xffffffff, 0x00000011,
446 0x7c30, 0xffffffff, 0x00000011,
447 0x10830, 0xffffffff, 0x00000011,
448 0x11430, 0xffffffff, 0x00000011,
449 0xd02c, 0xffffffff, 0x08421000,
450 0x240c, 0xffffffff, 0x00000380,
451 0x8b24, 0xffffffff, 0x00ff0fff,
452 0x28a4c, 0x06000000, 0x06000000,
453 0x10c, 0x00000001, 0x00000001,
454 0x8d00, 0xffffffff, 0x100e4848,
455 0x8d04, 0xffffffff, 0x00164745,
456 0x8c00, 0xffffffff, 0xe4000003,
457 0x8c04, 0xffffffff, 0x40600060,
458 0x8c08, 0xffffffff, 0x001c001c,
459 0x8cf0, 0xffffffff, 0x08e00410,
460 0x8c20, 0xffffffff, 0x00800080,
461 0x8c24, 0xffffffff, 0x00800080,
462 0x8c18, 0xffffffff, 0x20202078,
463 0x8c1c, 0xffffffff, 0x00001010,
464 0x28350, 0xffffffff, 0x00000000,
465 0xa008, 0xffffffff, 0x00010000,
466 0x5cc, 0xffffffff, 0x00000001,
467 0x9508, 0xffffffff, 0x00000002
470 static const u32 cedar_mgcg_init[] =
472 0x802c, 0xffffffff, 0xc0000000,
473 0x5448, 0xffffffff, 0x00000100,
474 0x55e4, 0xffffffff, 0x00000100,
475 0x160c, 0xffffffff, 0x00000100,
476 0x5644, 0xffffffff, 0x00000100,
477 0xc164, 0xffffffff, 0x00000100,
478 0x8a18, 0xffffffff, 0x00000100,
479 0x897c, 0xffffffff, 0x06000100,
480 0x8b28, 0xffffffff, 0x00000100,
481 0x9144, 0xffffffff, 0x00000100,
482 0x9a60, 0xffffffff, 0x00000100,
483 0x9868, 0xffffffff, 0x00000100,
484 0x8d58, 0xffffffff, 0x00000100,
485 0x9510, 0xffffffff, 0x00000100,
486 0x949c, 0xffffffff, 0x00000100,
487 0x9654, 0xffffffff, 0x00000100,
488 0x9030, 0xffffffff, 0x00000100,
489 0x9034, 0xffffffff, 0x00000100,
490 0x9038, 0xffffffff, 0x00000100,
491 0x903c, 0xffffffff, 0x00000100,
492 0x9040, 0xffffffff, 0x00000100,
493 0xa200, 0xffffffff, 0x00000100,
494 0xa204, 0xffffffff, 0x00000100,
495 0xa208, 0xffffffff, 0x00000100,
496 0xa20c, 0xffffffff, 0x00000100,
497 0x971c, 0xffffffff, 0x00000100,
498 0x977c, 0xffffffff, 0x00000100,
499 0x3f80, 0xffffffff, 0x00000100,
500 0xa210, 0xffffffff, 0x00000100,
501 0xa214, 0xffffffff, 0x00000100,
502 0x4d8, 0xffffffff, 0x00000100,
503 0x9784, 0xffffffff, 0x00000100,
504 0x9698, 0xffffffff, 0x00000100,
505 0x4d4, 0xffffffff, 0x00000200,
506 0x30cc, 0xffffffff, 0x00000100,
507 0xd0c0, 0xffffffff, 0xff000100,
508 0x802c, 0xffffffff, 0x40000000,
509 0x915c, 0xffffffff, 0x00010000,
510 0x9178, 0xffffffff, 0x00050000,
511 0x917c, 0xffffffff, 0x00030002,
512 0x918c, 0xffffffff, 0x00010004,
513 0x9190, 0xffffffff, 0x00070006,
514 0x9194, 0xffffffff, 0x00050000,
515 0x9198, 0xffffffff, 0x00030002,
516 0x91a8, 0xffffffff, 0x00010004,
517 0x91ac, 0xffffffff, 0x00070006,
518 0x91e8, 0xffffffff, 0x00000000,
519 0x9294, 0xffffffff, 0x00000000,
520 0x929c, 0xffffffff, 0x00000001,
521 0x802c, 0xffffffff, 0xc0000000
524 static const u32 juniper_mgcg_init[] =
526 0x802c, 0xffffffff, 0xc0000000,
527 0x5448, 0xffffffff, 0x00000100,
528 0x55e4, 0xffffffff, 0x00000100,
529 0x160c, 0xffffffff, 0x00000100,
530 0x5644, 0xffffffff, 0x00000100,
531 0xc164, 0xffffffff, 0x00000100,
532 0x8a18, 0xffffffff, 0x00000100,
533 0x897c, 0xffffffff, 0x06000100,
534 0x8b28, 0xffffffff, 0x00000100,
535 0x9144, 0xffffffff, 0x00000100,
536 0x9a60, 0xffffffff, 0x00000100,
537 0x9868, 0xffffffff, 0x00000100,
538 0x8d58, 0xffffffff, 0x00000100,
539 0x9510, 0xffffffff, 0x00000100,
540 0x949c, 0xffffffff, 0x00000100,
541 0x9654, 0xffffffff, 0x00000100,
542 0x9030, 0xffffffff, 0x00000100,
543 0x9034, 0xffffffff, 0x00000100,
544 0x9038, 0xffffffff, 0x00000100,
545 0x903c, 0xffffffff, 0x00000100,
546 0x9040, 0xffffffff, 0x00000100,
547 0xa200, 0xffffffff, 0x00000100,
548 0xa204, 0xffffffff, 0x00000100,
549 0xa208, 0xffffffff, 0x00000100,
550 0xa20c, 0xffffffff, 0x00000100,
551 0x971c, 0xffffffff, 0x00000100,
552 0xd0c0, 0xffffffff, 0xff000100,
553 0x802c, 0xffffffff, 0x40000000,
554 0x915c, 0xffffffff, 0x00010000,
555 0x9160, 0xffffffff, 0x00030002,
556 0x9178, 0xffffffff, 0x00070000,
557 0x917c, 0xffffffff, 0x00030002,
558 0x9180, 0xffffffff, 0x00050004,
559 0x918c, 0xffffffff, 0x00010006,
560 0x9190, 0xffffffff, 0x00090008,
561 0x9194, 0xffffffff, 0x00070000,
562 0x9198, 0xffffffff, 0x00030002,
563 0x919c, 0xffffffff, 0x00050004,
564 0x91a8, 0xffffffff, 0x00010006,
565 0x91ac, 0xffffffff, 0x00090008,
566 0x91b0, 0xffffffff, 0x00070000,
567 0x91b4, 0xffffffff, 0x00030002,
568 0x91b8, 0xffffffff, 0x00050004,
569 0x91c4, 0xffffffff, 0x00010006,
570 0x91c8, 0xffffffff, 0x00090008,
571 0x91cc, 0xffffffff, 0x00070000,
572 0x91d0, 0xffffffff, 0x00030002,
573 0x91d4, 0xffffffff, 0x00050004,
574 0x91e0, 0xffffffff, 0x00010006,
575 0x91e4, 0xffffffff, 0x00090008,
576 0x91e8, 0xffffffff, 0x00000000,
577 0x91ec, 0xffffffff, 0x00070000,
578 0x91f0, 0xffffffff, 0x00030002,
579 0x91f4, 0xffffffff, 0x00050004,
580 0x9200, 0xffffffff, 0x00010006,
581 0x9204, 0xffffffff, 0x00090008,
582 0x9208, 0xffffffff, 0x00070000,
583 0x920c, 0xffffffff, 0x00030002,
584 0x9210, 0xffffffff, 0x00050004,
585 0x921c, 0xffffffff, 0x00010006,
586 0x9220, 0xffffffff, 0x00090008,
587 0x9224, 0xffffffff, 0x00070000,
588 0x9228, 0xffffffff, 0x00030002,
589 0x922c, 0xffffffff, 0x00050004,
590 0x9238, 0xffffffff, 0x00010006,
591 0x923c, 0xffffffff, 0x00090008,
592 0x9240, 0xffffffff, 0x00070000,
593 0x9244, 0xffffffff, 0x00030002,
594 0x9248, 0xffffffff, 0x00050004,
595 0x9254, 0xffffffff, 0x00010006,
596 0x9258, 0xffffffff, 0x00090008,
597 0x925c, 0xffffffff, 0x00070000,
598 0x9260, 0xffffffff, 0x00030002,
599 0x9264, 0xffffffff, 0x00050004,
600 0x9270, 0xffffffff, 0x00010006,
601 0x9274, 0xffffffff, 0x00090008,
602 0x9278, 0xffffffff, 0x00070000,
603 0x927c, 0xffffffff, 0x00030002,
604 0x9280, 0xffffffff, 0x00050004,
605 0x928c, 0xffffffff, 0x00010006,
606 0x9290, 0xffffffff, 0x00090008,
607 0x9294, 0xffffffff, 0x00000000,
608 0x929c, 0xffffffff, 0x00000001,
609 0x802c, 0xffffffff, 0xc0000000,
610 0x977c, 0xffffffff, 0x00000100,
611 0x3f80, 0xffffffff, 0x00000100,
612 0xa210, 0xffffffff, 0x00000100,
613 0xa214, 0xffffffff, 0x00000100,
614 0x4d8, 0xffffffff, 0x00000100,
615 0x9784, 0xffffffff, 0x00000100,
616 0x9698, 0xffffffff, 0x00000100,
617 0x4d4, 0xffffffff, 0x00000200,
618 0x30cc, 0xffffffff, 0x00000100,
619 0x802c, 0xffffffff, 0xc0000000
622 static const u32 supersumo_golden_registers[] =
624 0x5eb4, 0xffffffff, 0x00000002,
625 0x5cc, 0xffffffff, 0x00000001,
626 0x7030, 0xffffffff, 0x00000011,
627 0x7c30, 0xffffffff, 0x00000011,
628 0x6104, 0x01000300, 0x00000000,
629 0x5bc0, 0x00300000, 0x00000000,
630 0x8c04, 0xffffffff, 0x40600060,
631 0x8c08, 0xffffffff, 0x001c001c,
632 0x8c20, 0xffffffff, 0x00800080,
633 0x8c24, 0xffffffff, 0x00800080,
634 0x8c18, 0xffffffff, 0x20202078,
635 0x8c1c, 0xffffffff, 0x00001010,
636 0x918c, 0xffffffff, 0x00010006,
637 0x91a8, 0xffffffff, 0x00010006,
638 0x91c4, 0xffffffff, 0x00010006,
639 0x91e0, 0xffffffff, 0x00010006,
640 0x9200, 0xffffffff, 0x00010006,
641 0x9150, 0xffffffff, 0x6e944040,
642 0x917c, 0xffffffff, 0x00030002,
643 0x9180, 0xffffffff, 0x00050004,
644 0x9198, 0xffffffff, 0x00030002,
645 0x919c, 0xffffffff, 0x00050004,
646 0x91b4, 0xffffffff, 0x00030002,
647 0x91b8, 0xffffffff, 0x00050004,
648 0x91d0, 0xffffffff, 0x00030002,
649 0x91d4, 0xffffffff, 0x00050004,
650 0x91f0, 0xffffffff, 0x00030002,
651 0x91f4, 0xffffffff, 0x00050004,
652 0x915c, 0xffffffff, 0x00010000,
653 0x9160, 0xffffffff, 0x00030002,
654 0x3f90, 0xffff0000, 0xff000000,
655 0x9178, 0xffffffff, 0x00070000,
656 0x9194, 0xffffffff, 0x00070000,
657 0x91b0, 0xffffffff, 0x00070000,
658 0x91cc, 0xffffffff, 0x00070000,
659 0x91ec, 0xffffffff, 0x00070000,
660 0x9148, 0xffff0000, 0xff000000,
661 0x9190, 0xffffffff, 0x00090008,
662 0x91ac, 0xffffffff, 0x00090008,
663 0x91c8, 0xffffffff, 0x00090008,
664 0x91e4, 0xffffffff, 0x00090008,
665 0x9204, 0xffffffff, 0x00090008,
666 0x3f94, 0xffff0000, 0xff000000,
667 0x914c, 0xffff0000, 0xff000000,
668 0x929c, 0xffffffff, 0x00000001,
669 0x8a18, 0xffffffff, 0x00000100,
670 0x8b28, 0xffffffff, 0x00000100,
671 0x9144, 0xffffffff, 0x00000100,
672 0x5644, 0xffffffff, 0x00000100,
673 0x9b7c, 0xffffffff, 0x00000000,
674 0x8030, 0xffffffff, 0x0000100a,
675 0x8a14, 0xffffffff, 0x00000007,
676 0x8b24, 0xffffffff, 0x00ff0fff,
677 0x8b10, 0xffffffff, 0x00000000,
678 0x28a4c, 0x06000000, 0x06000000,
679 0x4d8, 0xffffffff, 0x00000100,
680 0x913c, 0xffff000f, 0x0100000a,
681 0x960c, 0xffffffff, 0x54763210,
682 0x88c4, 0xffffffff, 0x000000c2,
683 0x88d4, 0xffffffff, 0x00000010,
684 0x8974, 0xffffffff, 0x00000000,
685 0xc78, 0x00000080, 0x00000080,
686 0x5e78, 0xffffffff, 0x001000f0,
687 0xd02c, 0xffffffff, 0x08421000,
688 0xa008, 0xffffffff, 0x00010000,
689 0x8d00, 0xffffffff, 0x100e4848,
690 0x8d04, 0xffffffff, 0x00164745,
691 0x8c00, 0xffffffff, 0xe4000003,
692 0x8cf0, 0x1fffffff, 0x08e00620,
693 0x28350, 0xffffffff, 0x00000000,
694 0x9508, 0xffffffff, 0x00000002
697 static const u32 sumo_golden_registers[] =
699 0x900c, 0x00ffffff, 0x0017071f,
700 0x8c18, 0xffffffff, 0x10101060,
701 0x8c1c, 0xffffffff, 0x00001010,
702 0x8c30, 0x0000000f, 0x00000005,
703 0x9688, 0x0000000f, 0x00000007
706 static const u32 wrestler_golden_registers[] =
708 0x5eb4, 0xffffffff, 0x00000002,
709 0x5cc, 0xffffffff, 0x00000001,
710 0x7030, 0xffffffff, 0x00000011,
711 0x7c30, 0xffffffff, 0x00000011,
712 0x6104, 0x01000300, 0x00000000,
713 0x5bc0, 0x00300000, 0x00000000,
714 0x918c, 0xffffffff, 0x00010006,
715 0x91a8, 0xffffffff, 0x00010006,
716 0x9150, 0xffffffff, 0x6e944040,
717 0x917c, 0xffffffff, 0x00030002,
718 0x9198, 0xffffffff, 0x00030002,
719 0x915c, 0xffffffff, 0x00010000,
720 0x3f90, 0xffff0000, 0xff000000,
721 0x9178, 0xffffffff, 0x00070000,
722 0x9194, 0xffffffff, 0x00070000,
723 0x9148, 0xffff0000, 0xff000000,
724 0x9190, 0xffffffff, 0x00090008,
725 0x91ac, 0xffffffff, 0x00090008,
726 0x3f94, 0xffff0000, 0xff000000,
727 0x914c, 0xffff0000, 0xff000000,
728 0x929c, 0xffffffff, 0x00000001,
729 0x8a18, 0xffffffff, 0x00000100,
730 0x8b28, 0xffffffff, 0x00000100,
731 0x9144, 0xffffffff, 0x00000100,
732 0x9b7c, 0xffffffff, 0x00000000,
733 0x8030, 0xffffffff, 0x0000100a,
734 0x8a14, 0xffffffff, 0x00000001,
735 0x8b24, 0xffffffff, 0x00ff0fff,
736 0x8b10, 0xffffffff, 0x00000000,
737 0x28a4c, 0x06000000, 0x06000000,
738 0x4d8, 0xffffffff, 0x00000100,
739 0x913c, 0xffff000f, 0x0100000a,
740 0x960c, 0xffffffff, 0x54763210,
741 0x88c4, 0xffffffff, 0x000000c2,
742 0x88d4, 0xffffffff, 0x00000010,
743 0x8974, 0xffffffff, 0x00000000,
744 0xc78, 0x00000080, 0x00000080,
745 0x5e78, 0xffffffff, 0x001000f0,
746 0xd02c, 0xffffffff, 0x08421000,
747 0xa008, 0xffffffff, 0x00010000,
748 0x8d00, 0xffffffff, 0x100e4848,
749 0x8d04, 0xffffffff, 0x00164745,
750 0x8c00, 0xffffffff, 0xe4000003,
751 0x8cf0, 0x1fffffff, 0x08e00410,
752 0x28350, 0xffffffff, 0x00000000,
753 0x9508, 0xffffffff, 0x00000002,
754 0x900c, 0xffffffff, 0x0017071f,
755 0x8c18, 0xffffffff, 0x10101060,
756 0x8c1c, 0xffffffff, 0x00001010
759 static const u32 barts_golden_registers[] =
761 0x5eb4, 0xffffffff, 0x00000002,
762 0x5e78, 0x8f311ff1, 0x001000f0,
763 0x3f90, 0xffff0000, 0xff000000,
764 0x9148, 0xffff0000, 0xff000000,
765 0x3f94, 0xffff0000, 0xff000000,
766 0x914c, 0xffff0000, 0xff000000,
767 0xc78, 0x00000080, 0x00000080,
768 0xbd4, 0x70073777, 0x00010001,
769 0xd02c, 0xbfffff1f, 0x08421000,
770 0xd0b8, 0x03773777, 0x02011003,
771 0x5bc0, 0x00200000, 0x50100000,
772 0x98f8, 0x33773777, 0x02011003,
773 0x98fc, 0xffffffff, 0x76543210,
774 0x7030, 0x31000311, 0x00000011,
775 0x2f48, 0x00000007, 0x02011003,
776 0x6b28, 0x00000010, 0x00000012,
777 0x7728, 0x00000010, 0x00000012,
778 0x10328, 0x00000010, 0x00000012,
779 0x10f28, 0x00000010, 0x00000012,
780 0x11b28, 0x00000010, 0x00000012,
781 0x12728, 0x00000010, 0x00000012,
782 0x240c, 0x000007ff, 0x00000380,
783 0x8a14, 0xf000001f, 0x00000007,
784 0x8b24, 0x3fff3fff, 0x00ff0fff,
785 0x8b10, 0x0000ff0f, 0x00000000,
786 0x28a4c, 0x07ffffff, 0x06000000,
787 0x10c, 0x00000001, 0x00010003,
788 0xa02c, 0xffffffff, 0x0000009b,
789 0x913c, 0x0000000f, 0x0100000a,
790 0x8d00, 0xffff7f7f, 0x100e4848,
791 0x8d04, 0x00ffffff, 0x00164745,
792 0x8c00, 0xfffc0003, 0xe4000003,
793 0x8c04, 0xf8ff00ff, 0x40600060,
794 0x8c08, 0x00ff00ff, 0x001c001c,
795 0x8cf0, 0x1fff1fff, 0x08e00620,
796 0x8c20, 0x0fff0fff, 0x00800080,
797 0x8c24, 0x0fff0fff, 0x00800080,
798 0x8c18, 0xffffffff, 0x20202078,
799 0x8c1c, 0x0000ffff, 0x00001010,
800 0x28350, 0x00000f01, 0x00000000,
801 0x9508, 0x3700001f, 0x00000002,
802 0x960c, 0xffffffff, 0x54763210,
803 0x88c4, 0x001f3ae3, 0x000000c2,
804 0x88d4, 0x0000001f, 0x00000010,
805 0x8974, 0xffffffff, 0x00000000
808 static const u32 turks_golden_registers[] =
810 0x5eb4, 0xffffffff, 0x00000002,
811 0x5e78, 0x8f311ff1, 0x001000f0,
812 0x8c8, 0x00003000, 0x00001070,
813 0x8cc, 0x000fffff, 0x00040035,
814 0x3f90, 0xffff0000, 0xfff00000,
815 0x9148, 0xffff0000, 0xfff00000,
816 0x3f94, 0xffff0000, 0xfff00000,
817 0x914c, 0xffff0000, 0xfff00000,
818 0xc78, 0x00000080, 0x00000080,
819 0xbd4, 0x00073007, 0x00010002,
820 0xd02c, 0xbfffff1f, 0x08421000,
821 0xd0b8, 0x03773777, 0x02010002,
822 0x5bc0, 0x00200000, 0x50100000,
823 0x98f8, 0x33773777, 0x00010002,
824 0x98fc, 0xffffffff, 0x33221100,
825 0x7030, 0x31000311, 0x00000011,
826 0x2f48, 0x33773777, 0x00010002,
827 0x6b28, 0x00000010, 0x00000012,
828 0x7728, 0x00000010, 0x00000012,
829 0x10328, 0x00000010, 0x00000012,
830 0x10f28, 0x00000010, 0x00000012,
831 0x11b28, 0x00000010, 0x00000012,
832 0x12728, 0x00000010, 0x00000012,
833 0x240c, 0x000007ff, 0x00000380,
834 0x8a14, 0xf000001f, 0x00000007,
835 0x8b24, 0x3fff3fff, 0x00ff0fff,
836 0x8b10, 0x0000ff0f, 0x00000000,
837 0x28a4c, 0x07ffffff, 0x06000000,
838 0x10c, 0x00000001, 0x00010003,
839 0xa02c, 0xffffffff, 0x0000009b,
840 0x913c, 0x0000000f, 0x0100000a,
841 0x8d00, 0xffff7f7f, 0x100e4848,
842 0x8d04, 0x00ffffff, 0x00164745,
843 0x8c00, 0xfffc0003, 0xe4000003,
844 0x8c04, 0xf8ff00ff, 0x40600060,
845 0x8c08, 0x00ff00ff, 0x001c001c,
846 0x8cf0, 0x1fff1fff, 0x08e00410,
847 0x8c20, 0x0fff0fff, 0x00800080,
848 0x8c24, 0x0fff0fff, 0x00800080,
849 0x8c18, 0xffffffff, 0x20202078,
850 0x8c1c, 0x0000ffff, 0x00001010,
851 0x28350, 0x00000f01, 0x00000000,
852 0x9508, 0x3700001f, 0x00000002,
853 0x960c, 0xffffffff, 0x54763210,
854 0x88c4, 0x001f3ae3, 0x000000c2,
855 0x88d4, 0x0000001f, 0x00000010,
856 0x8974, 0xffffffff, 0x00000000
859 static const u32 caicos_golden_registers[] =
861 0x5eb4, 0xffffffff, 0x00000002,
862 0x5e78, 0x8f311ff1, 0x001000f0,
863 0x8c8, 0x00003420, 0x00001450,
864 0x8cc, 0x000fffff, 0x00040035,
865 0x3f90, 0xffff0000, 0xfffc0000,
866 0x9148, 0xffff0000, 0xfffc0000,
867 0x3f94, 0xffff0000, 0xfffc0000,
868 0x914c, 0xffff0000, 0xfffc0000,
869 0xc78, 0x00000080, 0x00000080,
870 0xbd4, 0x00073007, 0x00010001,
871 0xd02c, 0xbfffff1f, 0x08421000,
872 0xd0b8, 0x03773777, 0x02010001,
873 0x5bc0, 0x00200000, 0x50100000,
874 0x98f8, 0x33773777, 0x02010001,
875 0x98fc, 0xffffffff, 0x33221100,
876 0x7030, 0x31000311, 0x00000011,
877 0x2f48, 0x33773777, 0x02010001,
878 0x6b28, 0x00000010, 0x00000012,
879 0x7728, 0x00000010, 0x00000012,
880 0x10328, 0x00000010, 0x00000012,
881 0x10f28, 0x00000010, 0x00000012,
882 0x11b28, 0x00000010, 0x00000012,
883 0x12728, 0x00000010, 0x00000012,
884 0x240c, 0x000007ff, 0x00000380,
885 0x8a14, 0xf000001f, 0x00000001,
886 0x8b24, 0x3fff3fff, 0x00ff0fff,
887 0x8b10, 0x0000ff0f, 0x00000000,
888 0x28a4c, 0x07ffffff, 0x06000000,
889 0x10c, 0x00000001, 0x00010003,
890 0xa02c, 0xffffffff, 0x0000009b,
891 0x913c, 0x0000000f, 0x0100000a,
892 0x8d00, 0xffff7f7f, 0x100e4848,
893 0x8d04, 0x00ffffff, 0x00164745,
894 0x8c00, 0xfffc0003, 0xe4000003,
895 0x8c04, 0xf8ff00ff, 0x40600060,
896 0x8c08, 0x00ff00ff, 0x001c001c,
897 0x8cf0, 0x1fff1fff, 0x08e00410,
898 0x8c20, 0x0fff0fff, 0x00800080,
899 0x8c24, 0x0fff0fff, 0x00800080,
900 0x8c18, 0xffffffff, 0x20202078,
901 0x8c1c, 0x0000ffff, 0x00001010,
902 0x28350, 0x00000f01, 0x00000000,
903 0x9508, 0x3700001f, 0x00000002,
904 0x960c, 0xffffffff, 0x54763210,
905 0x88c4, 0x001f3ae3, 0x000000c2,
906 0x88d4, 0x0000001f, 0x00000010,
907 0x8974, 0xffffffff, 0x00000000
910 static void evergreen_init_golden_registers(struct radeon_device *rdev)
912 switch (rdev->family) {
915 radeon_program_register_sequence(rdev,
916 evergreen_golden_registers,
917 (const u32)ARRAY_SIZE(evergreen_golden_registers));
918 radeon_program_register_sequence(rdev,
919 evergreen_golden_registers2,
920 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
921 radeon_program_register_sequence(rdev,
923 (const u32)ARRAY_SIZE(cypress_mgcg_init));
926 radeon_program_register_sequence(rdev,
927 evergreen_golden_registers,
928 (const u32)ARRAY_SIZE(evergreen_golden_registers));
929 radeon_program_register_sequence(rdev,
930 evergreen_golden_registers2,
931 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
932 radeon_program_register_sequence(rdev,
934 (const u32)ARRAY_SIZE(juniper_mgcg_init));
937 radeon_program_register_sequence(rdev,
938 evergreen_golden_registers,
939 (const u32)ARRAY_SIZE(evergreen_golden_registers));
940 radeon_program_register_sequence(rdev,
941 evergreen_golden_registers2,
942 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
943 radeon_program_register_sequence(rdev,
945 (const u32)ARRAY_SIZE(redwood_mgcg_init));
948 radeon_program_register_sequence(rdev,
949 cedar_golden_registers,
950 (const u32)ARRAY_SIZE(cedar_golden_registers));
951 radeon_program_register_sequence(rdev,
952 evergreen_golden_registers2,
953 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
954 radeon_program_register_sequence(rdev,
956 (const u32)ARRAY_SIZE(cedar_mgcg_init));
959 radeon_program_register_sequence(rdev,
960 wrestler_golden_registers,
961 (const u32)ARRAY_SIZE(wrestler_golden_registers));
964 radeon_program_register_sequence(rdev,
965 supersumo_golden_registers,
966 (const u32)ARRAY_SIZE(supersumo_golden_registers));
969 radeon_program_register_sequence(rdev,
970 supersumo_golden_registers,
971 (const u32)ARRAY_SIZE(supersumo_golden_registers));
972 radeon_program_register_sequence(rdev,
973 sumo_golden_registers,
974 (const u32)ARRAY_SIZE(sumo_golden_registers));
977 radeon_program_register_sequence(rdev,
978 barts_golden_registers,
979 (const u32)ARRAY_SIZE(barts_golden_registers));
982 radeon_program_register_sequence(rdev,
983 turks_golden_registers,
984 (const u32)ARRAY_SIZE(turks_golden_registers));
987 radeon_program_register_sequence(rdev,
988 caicos_golden_registers,
989 (const u32)ARRAY_SIZE(caicos_golden_registers));
/*
 * evergreen_tiling_fields - decode packed tiling flags into hw enum values.
 *
 * Extracts bank width/height, macro tile aspect and tile split from the
 * packed tiling_flags word, then maps each raw count (1/2/4/8) onto the
 * corresponding EVERGREEN_ADDR_SURF_* register enum via the switches below.
 * NOTE(review): the switch headers/default cases are not visible in this
 * extraction; behavior for out-of-range values is presumably left unchanged.
 */
996 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
997 unsigned *bankh, unsigned *mtaspect,
998 unsigned *tile_split)
/* pull the raw fields out of the packed flags word */
1000 *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1001 *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1002 *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1003 *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
/* map raw bank width count -> ADDR_SURF enum */
1006 case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1007 case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1008 case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1009 case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
/* map raw bank height count -> ADDR_SURF enum */
1013 case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1014 case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1015 case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1016 case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
/* map raw macro tile aspect -> ADDR_SURF enum */
1018 switch (*mtaspect) {
1020 case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1021 case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1022 case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1023 case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
/*
 * sumo_set_uvd_clock - program one UVD clock (VCLK or DCLK) on sumo.
 *
 * Asks the atom interpreter for PLL dividers for the requested clock,
 * writes the post divider into cntl_reg, then polls status_reg (up to
 * 100 iterations) for the DCLK_STATUS ready bit.
 * Returns 0 on success or the error from the divider query / a timeout
 * (tail of the function is outside this view).
 */
1027 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1028 u32 cntl_reg, u32 status_reg)
1031 struct atom_clock_dividers dividers;
/* BUGFIX: "÷rs" was HTML-entity mojibake of "&dividers" */
1033 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1034 clock, false, &dividers);
1038 WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
/* wait for the clock to report ready */
1040 for (i = 0; i < 100; i++) {
1041 if (RREG32(status_reg) & DCLK_STATUS)
/*
 * sumo_set_uvd_clocks - set both UVD clocks (VCLK and DCLK) on sumo.
 *
 * Programs each clock via sumo_set_uvd_clock() and records the resulting
 * frequencies (in MHz) in CG_SCRATCH1: vclk in the low 16 bits, dclk in
 * the high 16 bits.
 */
1051 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1054 u32 cg_scratch = RREG32(CG_SCRATCH1);
1056 r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
/* stash vclk (MHz) in the low half of the scratch register */
1059 cg_scratch &= 0xffff0000;
1060 cg_scratch |= vclk / 100; /* Mhz */
1062 r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
/* stash dclk (MHz) in the high half */
1065 cg_scratch &= 0x0000ffff;
1066 cg_scratch |= (dclk / 100) << 16; /* Mhz */
1069 WREG32(CG_SCRATCH1, cg_scratch);
/*
 * evergreen_set_uvd_clocks - program the UVD PLL (UPLL) on evergreen.
 *
 * Bypasses VCLK/DCLK with BCLK, puts the UPLL into bypass (and sleep when
 * both requested clocks are 0), computes dividers, then walks the PLL
 * through the documented reset/settle/ctlreq sequence before switching
 * the clock sources back to the PLL outputs.  The register-write order
 * below is a hardware-mandated sequence - do not reorder.
 */
1074 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1076 /* start off with something large */
1077 unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
1080 /* bypass vclk and dclk with bclk */
1081 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1082 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
1083 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK))
1085 /* put PLL in bypass mode */
1086 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);
/* vclk/dclk of 0 means "UVD off": leave PLL bypassed and asleep */
1088 if (!vclk || !dclk) {
1089 /* keep the Bypass mode, put PLL to sleep */
1090 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
/* compute feedback and post dividers for the requested clocks */
1094 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
1095 16384, 0x03FFFFFF, 0, 128, 5,
1096 &fb_div, &vclk_div, &dclk_div);
1100 /* set VCO_MODE to 1 */
1101 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
1103 /* toggle UPLL_SLEEP to 1 then back to 0 */
1104 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
1105 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
1107 /* deassert UPLL_RESET */
1108 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
/* handshake with the PLL via the control request mechanism */
1112 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1116 /* assert UPLL_RESET again */
1117 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
1119 /* disable spread spectrum. */
1120 WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
1122 /* set feedback divider */
1123 WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
1125 /* set ref divider to 0 */
1126 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
/* spare bit selects a VCO range depending on the feedback divider */
1128 if (fb_div < 307200)
1129 WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
1131 WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);
1133 /* set PDIV_A and PDIV_B */
1134 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1135 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
1136 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
1138 /* give the PLL some time to settle */
1141 /* deassert PLL_RESET */
1142 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1146 /* switch from bypass mode to normal mode */
1147 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
1149 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1153 /* switch VCLK and DCLK selection */
1154 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1155 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
1156 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
/*
 * evergreen_fix_pci_max_read_req_size - sanitize PCIe Max_Read_Request_Size.
 *
 * Reads the PCIe Device Control register and, if the MRRS field holds a
 * value the hardware mishandles (0, 6 or 7), rewrites it with a safe
 * encoding.  Uses the FreeBSD pci config accessors rather than the Linux
 * pcie helpers.
 */
1163 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1168 err = pci_find_extcap(rdev->dev, PCIY_EXPRESS, &cap);
1172 cap += PCIER_DEVCTRL;
1174 ctl = pci_read_config(rdev->dev, cap, 2);
/* MRRS field lives in bits 14:12 of Device Control */
1176 v = (ctl & PCIEM_DEVCTL_MAX_READRQ_MASK) >> 12;
1178 /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1179 * to avoid hangs or performance issues
1181 if ((v == 0) || (v == 6) || (v == 7)) {
1182 ctl &= ~PCIEM_DEVCTL_MAX_READRQ_MASK;
1184 pci_write_config(rdev->dev, cap, ctl, 2);
/* dce4_is_in_vblank - true when the given crtc currently reports V_BLANK. */
1188 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1190 if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
/*
 * dce4_is_counter_moving - detect whether the crtc scanout position counter
 * is advancing, by sampling the position register twice and comparing.
 */
1196 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1200 pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1201 pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1210 * dce4_wait_for_vblank - vblank wait asic callback.
1212 * @rdev: radeon_device pointer
1213 * @crtc: crtc to wait for vblank on
1215 * Wait for vblank on the requested crtc (evergreen+).
1217 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
/* bounds-check the crtc index and skip disabled crtcs */
1221 if (crtc >= rdev->num_crtc)
1224 if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1227 /* depending on when we hit vblank, we may be close to active; if so,
1228 * wait for another frame.
/* first wait out any vblank we may already be in; bail early if the
 * position counter has stopped (display hung/off) */
1230 while (dce4_is_in_vblank(rdev, crtc)) {
1231 if (i++ % 100 == 0) {
1232 if (!dce4_is_counter_moving(rdev, crtc))
/* then wait for the next vblank to begin, with the same stall check */
1237 while (!dce4_is_in_vblank(rdev, crtc)) {
1238 if (i++ % 100 == 0) {
1239 if (!dce4_is_counter_moving(rdev, crtc))
1246 * evergreen_pre_page_flip - pre-pageflip callback.
1248 * @rdev: radeon_device pointer
1249 * @crtc: crtc to prepare for pageflip on
1251 * Pre-pageflip callback (evergreen+).
1252 * Enables the pageflip irq (vblank irq).
1254 void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
1256 /* enable the pflip int */
1257 radeon_irq_kms_pflip_irq_get(rdev, crtc);
1261 * evergreen_post_page_flip - post-pageflip callback.
1263 * @rdev: radeon_device pointer
1264 * @crtc: crtc to cleanup pageflip on
1266 * Post-pageflip callback (evergreen+).
1267 * Disables the pageflip irq (vblank irq).
1269 void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
1271 /* disable the pflip int */
1272 radeon_irq_kms_pflip_irq_put(rdev, crtc);
1276 * evergreen_page_flip - pageflip callback.
1278 * @rdev: radeon_device pointer
1279 * @crtc_id: crtc to cleanup pageflip on
1280 * @crtc_base: new address of the crtc (GPU MC address)
1282 * Does the actual pageflip (evergreen+).
1283 * During vblank we take the crtc lock and wait for the update_pending
1284 * bit to go high, when it does, we release the lock, and allow the
1285 * double buffered update to take place.
1286 * Returns the current update pending status.
1288 u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1290 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1291 u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1294 /* Lock the graphics update lock */
1295 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1296 WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1298 /* update the scanout addresses */
/* both secondary and primary surfaces are pointed at the new buffer */
1299 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1300 upper_32_bits(crtc_base));
1301 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1304 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1305 upper_32_bits(crtc_base));
1306 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1309 /* Wait for update_pending to go high. */
1310 for (i = 0; i < rdev->usec_timeout; i++) {
1311 if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1315 DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1317 /* Unlock the lock, so double-buffering can take place inside vblank */
1318 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1319 WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1321 /* Return current update_pending status: */
1322 return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
1325 /* get temperature in millidegrees */
/*
 * evergreen_get_temp - read the on-die thermal sensor.
 *
 * Juniper uses a separate sensor (TS0) with a signed offset calibration
 * value; other evergreen parts expose a packed, sign-extended temperature
 * in CG_MULT_THERMAL_STATUS.  Returns millidegrees Celsius.
 */
1326 int evergreen_get_temp(struct radeon_device *rdev)
1329 int actual_temp = 0;
1331 if (rdev->family == CHIP_JUNIPER) {
1332 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1334 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
/* toffset bit 8 is the sign of the calibration offset */
1337 if (toffset & 0x100)
1338 actual_temp = temp / 2 - (0x200 - toffset);
1340 actual_temp = temp / 2 + toffset;
1342 actual_temp = actual_temp * 1000;
1345 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
/* sign-extend the 9-bit temperature field when negative */
1350 else if (temp & 0x200)
1352 else if (temp & 0x100) {
1353 actual_temp = temp & 0x1ff;
1354 actual_temp |= ~0x1ff;
1356 actual_temp = temp & 0xff;
1358 actual_temp = (actual_temp * 1000) / 2;
/*
 * sumo_get_temp - read the thermal sensor on sumo parts.
 * The raw register value carries a fixed bias of 49; result is in
 * millidegrees Celsius.
 */
1364 int sumo_get_temp(struct radeon_device *rdev)
1366 u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1367 int actual_temp = temp - 49;
1369 return actual_temp * 1000;
1373 * sumo_pm_init_profile - Initialize power profiles callback.
1375 * @rdev: radeon_device pointer
1377 * Initialize the power states used in profile mode
1378 * (sumo, trinity, SI).
1379 * Used for profile mode only.
1381 void sumo_pm_init_profile(struct radeon_device *rdev)
/* default profile: mirror the bios default power state */
1386 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1387 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1388 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1389 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
/* low/mid profiles: battery state on mobile parts, else performance */
1392 if (rdev->flags & RADEON_IS_MOBILITY)
1393 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1395 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1397 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1398 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1399 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1400 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1402 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1403 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1404 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1405 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1407 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1408 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1409 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1410 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1412 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1413 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1414 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1415 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
/* high profiles: always use the performance state, top clock mode */
1418 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1419 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1420 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1421 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1422 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1423 rdev->pm.power_state[idx].num_clock_modes - 1;
1425 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1426 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1427 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1428 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1429 rdev->pm.power_state[idx].num_clock_modes - 1;
1433 * btc_pm_init_profile - Initialize power profiles callback.
1435 * @rdev: radeon_device pointer
1437 * Initialize the power states used in profile mode
1439 * Used for profile mode only.
1441 void btc_pm_init_profile(struct radeon_device *rdev)
/* default profile: bios default power state, clock mode 2 (high) */
1446 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1447 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1448 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1449 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1450 /* starting with BTC, there is one state that is used for both
1451 * MH and SH. Difference is that we always use the high clock index for
/* single power state shared by SH/MH; only the clock mode index differs */
1454 if (rdev->flags & RADEON_IS_MOBILITY)
1455 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1457 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1459 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1460 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1461 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1462 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1464 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1465 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1466 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1467 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1469 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1470 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1471 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1472 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1474 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1475 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1476 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1477 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1479 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1480 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1481 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1482 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1484 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1485 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1486 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1487 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1491 * evergreen_pm_misc - set additional pm hw parameters callback.
1493 * @rdev: radeon_device pointer
1495 * Set non-clock parameters associated with a power state
1496 * (voltage, etc.) (evergreen+).
1498 void evergreen_pm_misc(struct radeon_device *rdev)
1500 int req_ps_idx = rdev->pm.requested_power_state_index;
1501 int req_cm_idx = rdev->pm.requested_clock_mode_index;
1502 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1503 struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1505 if (voltage->type == VOLTAGE_SW) {
1506 /* 0xff0x are flags rather then an actual voltage */
1507 if ((voltage->voltage & 0xff00) == 0xff00)
/* only touch the regulator when the vddc value actually changes */
1509 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1510 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1511 rdev->pm.current_vddc = voltage->voltage;
1512 DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1515 /* starting with BTC, there is one state that is used for both
1516 * MH and SH. Difference is that we always use the high clock index for
/* on BTC+ with active displays, MID/LOW MH profiles borrow the
 * HIGH_MH clock mode's voltage info for vddci */
1519 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1520 (rdev->family >= CHIP_BARTS) &&
1521 rdev->pm.active_crtc_count &&
1522 ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1523 (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1524 voltage = &rdev->pm.power_state[req_ps_idx].
1525 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1527 /* 0xff0x are flags rather then an actual voltage */
1528 if ((voltage->vddci & 0xff00) == 0xff00)
1530 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1531 radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1532 rdev->pm.current_vddci = voltage->vddci;
1533 DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1539 * evergreen_pm_prepare - pre-power state change callback.
1541 * @rdev: radeon_device pointer
1543 * Prepare for a power state change (evergreen+).
1545 void evergreen_pm_prepare(struct radeon_device *rdev)
1547 struct drm_device *ddev = rdev->ddev;
1548 struct drm_crtc *crtc;
1549 struct radeon_crtc *radeon_crtc;
1552 /* disable any active CRTCs */
/* sets DISP_READ_REQUEST_DISABLE so displays stop issuing memory
 * requests while clocks are reprogrammed; undone in evergreen_pm_finish */
1553 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1554 radeon_crtc = to_radeon_crtc(crtc);
1555 if (radeon_crtc->enabled) {
1556 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1557 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1558 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1564 * evergreen_pm_finish - post-power state change callback.
1566 * @rdev: radeon_device pointer
1568 * Clean up after a power state change (evergreen+).
1570 void evergreen_pm_finish(struct radeon_device *rdev)
1572 struct drm_device *ddev = rdev->ddev;
1573 struct drm_crtc *crtc;
1574 struct radeon_crtc *radeon_crtc;
1577 /* enable any active CRTCs */
/* clears the read-request-disable bit set in evergreen_pm_prepare */
1578 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1579 radeon_crtc = to_radeon_crtc(crtc);
1580 if (radeon_crtc->enabled) {
1581 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1582 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1583 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1589 * evergreen_hpd_sense - hpd sense callback.
1591 * @rdev: radeon_device pointer
1592 * @hpd: hpd (hotplug detect) pin
1594 * Checks if a digital monitor is connected (evergreen+).
1595 * Returns true if connected, false if not connected.
1597 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1599 bool connected = false;
/* one SENSE-bit check per hpd pin (case labels not visible in this view) */
1603 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1607 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1611 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1615 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1619 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1623 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1634 * evergreen_hpd_set_polarity - hpd set polarity callback.
1636 * @rdev: radeon_device pointer
1637 * @hpd: hpd (hotplug detect) pin
1639 * Set the polarity of the hpd pin (evergreen+).
1641 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1642 enum radeon_hpd_id hpd)
1645 bool connected = evergreen_hpd_sense(rdev, hpd);
/* invert the interrupt polarity so the next transition (plug or unplug)
 * fires: clear POLARITY when connected, set it when disconnected */
1649 tmp = RREG32(DC_HPD1_INT_CONTROL);
1651 tmp &= ~DC_HPDx_INT_POLARITY;
1653 tmp |= DC_HPDx_INT_POLARITY;
1654 WREG32(DC_HPD1_INT_CONTROL, tmp);
1657 tmp = RREG32(DC_HPD2_INT_CONTROL);
1659 tmp &= ~DC_HPDx_INT_POLARITY;
1661 tmp |= DC_HPDx_INT_POLARITY;
1662 WREG32(DC_HPD2_INT_CONTROL, tmp);
1665 tmp = RREG32(DC_HPD3_INT_CONTROL);
1667 tmp &= ~DC_HPDx_INT_POLARITY;
1669 tmp |= DC_HPDx_INT_POLARITY;
1670 WREG32(DC_HPD3_INT_CONTROL, tmp);
1673 tmp = RREG32(DC_HPD4_INT_CONTROL);
1675 tmp &= ~DC_HPDx_INT_POLARITY;
1677 tmp |= DC_HPDx_INT_POLARITY;
1678 WREG32(DC_HPD4_INT_CONTROL, tmp);
1681 tmp = RREG32(DC_HPD5_INT_CONTROL);
1683 tmp &= ~DC_HPDx_INT_POLARITY;
1685 tmp |= DC_HPDx_INT_POLARITY;
1686 WREG32(DC_HPD5_INT_CONTROL, tmp);
1689 tmp = RREG32(DC_HPD6_INT_CONTROL);
1691 tmp &= ~DC_HPDx_INT_POLARITY;
1693 tmp |= DC_HPDx_INT_POLARITY;
1694 WREG32(DC_HPD6_INT_CONTROL, tmp);
1702 * evergreen_hpd_init - hpd setup callback.
1704 * @rdev: radeon_device pointer
1706 * Setup the hpd pins used by the card (evergreen+).
1707 * Enable the pin, set the polarity, and enable the hpd interrupts.
1709 void evergreen_hpd_init(struct radeon_device *rdev)
1711 struct drm_device *dev = rdev->ddev;
1712 struct drm_connector *connector;
1713 unsigned enabled = 0;
/* connection timer + rx int timer + pin enable, written to every pin */
1714 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1715 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1717 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1718 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1720 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1721 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1722 /* don't try to enable hpd on eDP or LVDS avoid breaking the
1723 * aux dp channel on imac and help (but not completely fix)
1724 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1725 * also avoid interrupt storms during dpms.
1729 switch (radeon_connector->hpd.hpd) {
1731 WREG32(DC_HPD1_CONTROL, tmp);
1734 WREG32(DC_HPD2_CONTROL, tmp);
1737 WREG32(DC_HPD3_CONTROL, tmp);
1740 WREG32(DC_HPD4_CONTROL, tmp);
1743 WREG32(DC_HPD5_CONTROL, tmp);
1746 WREG32(DC_HPD6_CONTROL, tmp);
/* record the pin in the enable mask and arm its polarity */
1751 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1752 enabled |= 1 << radeon_connector->hpd.hpd;
1754 radeon_irq_kms_enable_hpd(rdev, enabled);
1758 * evergreen_hpd_fini - hpd tear down callback.
1760 * @rdev: radeon_device pointer
1762 * Tear down the hpd pins used by the card (evergreen+).
1763 * Disable the hpd interrupts.
1765 void evergreen_hpd_fini(struct radeon_device *rdev)
1767 struct drm_device *dev = rdev->ddev;
1768 struct drm_connector *connector;
1769 unsigned disabled = 0;
/* zero each connector's HPD control register, then mask its irq */
1771 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1772 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1773 switch (radeon_connector->hpd.hpd) {
1775 WREG32(DC_HPD1_CONTROL, 0);
1778 WREG32(DC_HPD2_CONTROL, 0);
1781 WREG32(DC_HPD3_CONTROL, 0);
1784 WREG32(DC_HPD4_CONTROL, 0);
1787 WREG32(DC_HPD5_CONTROL, 0);
1790 WREG32(DC_HPD6_CONTROL, 0);
1795 disabled |= 1 << radeon_connector->hpd.hpd;
1797 radeon_irq_kms_disable_hpd(rdev, disabled);
1800 /* watermark setup */
/*
 * evergreen_line_buffer_adjust - partition a shared line buffer between a
 * pair of display controllers and return this crtc's allocation size.
 */
1802 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1803 struct radeon_crtc *radeon_crtc,
1804 struct drm_display_mode *mode,
1805 struct drm_display_mode *other_mode)
1810 * There are 3 line buffers, each one shared by 2 display controllers.
1811 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1812 * the display controllers. The partitioning is done via one of four
1813 * preset allocations specified in bits 2:0:
1814 * first display controller
1815 * 0 - first half of lb (3840 * 2)
1816 * 1 - first 3/4 of lb (5760 * 2)
1817 * 2 - whole lb (7680 * 2), other crtc must be disabled
1818 * 3 - first 1/4 of lb (1920 * 2)
1819 * second display controller
1820 * 4 - second half of lb (3840 * 2)
1821 * 5 - second 3/4 of lb (5760 * 2)
1822 * 6 - whole lb (7680 * 2), other crtc must be disabled
1823 * 7 - last 1/4 of lb (1920 * 2)
1825 /* this can get tricky if we have two large displays on a paired group
1826 * of crtcs. Ideally for multiple large displays we'd assign them to
1827 * non-linked crtcs for maximum line buffer allocation.
1829 if (radeon_crtc->base.enabled && mode) {
1833 tmp = 2; /* whole */
1837 /* second controller of the pair uses second half of the lb */
1838 if (radeon_crtc->crtc_id % 2)
1840 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
/* translate the split setting into the lb size granted to this crtc;
 * DCE5 parts have a larger line buffer than DCE4 (branches elided here) */
1842 if (radeon_crtc->base.enabled && mode) {
1847 if (ASIC_IS_DCE5(rdev))
1853 if (ASIC_IS_DCE5(rdev))
1859 if (ASIC_IS_DCE5(rdev))
1865 if (ASIC_IS_DCE5(rdev))
1872 /* controller not enabled, so no lb used */
/*
 * evergreen_get_number_of_dram_channels - decode the channel count from
 * the NOOFCHAN field of MC_SHARED_CHMAP (case bodies outside this view).
 */
1876 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1878 u32 tmp = RREG32(MC_SHARED_CHMAP);
1880 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
/* Inputs to the display watermark calculations below; one instance is
 * filled per crtc per clock level in evergreen_program_watermarks(). */
1893 struct evergreen_wm_params {
1894 u32 dram_channels; /* number of dram channels */
1895 u32 yclk; /* bandwidth per dram data pin in kHz */
1896 u32 sclk; /* engine clock in kHz */
1897 u32 disp_clk; /* display clock in kHz */
1898 u32 src_width; /* viewport width */
1899 u32 active_time; /* active display time in ns */
1900 u32 blank_time; /* blank time in ns */
1901 bool interlaced; /* mode is interlaced */
1902 fixed20_12 vsc; /* vertical scale ratio */
1903 u32 num_heads; /* number of active crtcs */
1904 u32 bytes_per_pixel; /* bytes per pixel display + overlay */
1905 u32 lb_size; /* line buffer allocated to pipe */
1906 u32 vtaps; /* vertical scaler taps */
/*
 * evergreen_dram_bandwidth - raw DRAM bandwidth in MB/s using 20.12
 * fixed point: channels * 4 bytes * yclk(MHz) * 0.7 efficiency.
 */
1909 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1911 /* Calculate DRAM Bandwidth and the part allocated to display. */
1912 fixed20_12 dram_efficiency; /* 0.7 */
1913 fixed20_12 yclk, dram_channels, bandwidth;
1916 a.full = dfixed_const(1000);
/* yclk is in kHz; divide by 1000 to work in MHz */
1917 yclk.full = dfixed_const(wm->yclk);
1918 yclk.full = dfixed_div(yclk, a);
1919 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1920 a.full = dfixed_const(10);
1921 dram_efficiency.full = dfixed_const(7);
1922 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1923 bandwidth.full = dfixed_mul(dram_channels, yclk);
1924 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1926 return dfixed_trunc(bandwidth);
/*
 * evergreen_dram_bandwidth_for_display - share of DRAM bandwidth assumed
 * available to the display block; uses worst-case 0.3 allocation factor.
 */
1929 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1931 /* Calculate DRAM Bandwidth and the part allocated to display. */
1932 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1933 fixed20_12 yclk, dram_channels, bandwidth;
1936 a.full = dfixed_const(1000);
1937 yclk.full = dfixed_const(wm->yclk);
1938 yclk.full = dfixed_div(yclk, a);
1939 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1940 a.full = dfixed_const(10);
1941 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1942 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1943 bandwidth.full = dfixed_mul(dram_channels, yclk);
1944 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1946 return dfixed_trunc(bandwidth);
/*
 * evergreen_data_return_bandwidth - display data-return bandwidth:
 * 32 bytes/clock * sclk(MHz) * 0.8 efficiency.
 */
1949 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1951 /* Calculate the display Data return Bandwidth */
1952 fixed20_12 return_efficiency; /* 0.8 */
1953 fixed20_12 sclk, bandwidth;
1956 a.full = dfixed_const(1000);
1957 sclk.full = dfixed_const(wm->sclk);
1958 sclk.full = dfixed_div(sclk, a);
1959 a.full = dfixed_const(10);
1960 return_efficiency.full = dfixed_const(8);
1961 return_efficiency.full = dfixed_div(return_efficiency, a);
1962 a.full = dfixed_const(32);
1963 bandwidth.full = dfixed_mul(a, sclk);
1964 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1966 return dfixed_trunc(bandwidth);
/*
 * evergreen_dmif_request_bandwidth - DMIF request bandwidth:
 * 32 bytes/clock * disp_clk(MHz) * 0.8 request efficiency.
 */
1969 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1971 /* Calculate the DMIF Request Bandwidth */
1972 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1973 fixed20_12 disp_clk, bandwidth;
1976 a.full = dfixed_const(1000);
1977 disp_clk.full = dfixed_const(wm->disp_clk);
1978 disp_clk.full = dfixed_div(disp_clk, a);
1979 a.full = dfixed_const(10);
1980 disp_clk_request_efficiency.full = dfixed_const(8);
1981 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1982 a.full = dfixed_const(32);
1983 bandwidth.full = dfixed_mul(a, disp_clk);
1984 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1986 return dfixed_trunc(bandwidth);
/*
 * evergreen_available_bandwidth - the tightest of the three bandwidth
 * limits (DRAM, data return, DMIF); usable for bursts, not sustained.
 */
1989 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
1991 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1992 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
1993 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
1994 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
1996 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
/*
 * evergreen_average_bandwidth - sustained bandwidth this display mode
 * needs: src_width * bpp * vsc / line_time.
 */
1999 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2001 /* Calculate the display mode Average Bandwidth
2002 * DisplayMode should contain the source and destination dimensions,
2006 fixed20_12 line_time;
2007 fixed20_12 src_width;
2008 fixed20_12 bandwidth;
2011 a.full = dfixed_const(1000);
/* line_time is active + blank, converted from ns to us */
2012 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2013 line_time.full = dfixed_div(line_time, a);
2014 bpp.full = dfixed_const(wm->bytes_per_pixel);
2015 src_width.full = dfixed_const(wm->src_width);
2016 bandwidth.full = dfixed_mul(src_width, bpp);
2017 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2018 bandwidth.full = dfixed_div(bandwidth, line_time);
2020 return dfixed_trunc(bandwidth);
/*
 * evergreen_latency_watermark - worst-case latency (ns) the line buffer
 * must hide: memory latency + other heads' chunk/cursor return time +
 * dc pipe latency, extended by any line-fill shortfall.
 */
2023 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2025 /* First calculate the latency in ns */
2026 u32 mc_latency = 2000; /* 2000 ns. */
2027 u32 available_bandwidth = evergreen_available_bandwidth(wm);
2028 u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2029 u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2030 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2031 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2032 (wm->num_heads * cursor_line_pair_return_time);
2033 u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2034 u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2037 if (wm->num_heads == 0)
/* downscaling or interlace requires fetching more source lines per
 * destination line */
2040 a.full = dfixed_const(2);
2041 b.full = dfixed_const(1);
2042 if ((wm->vsc.full > a.full) ||
2043 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2045 ((wm->vsc.full >= a.full) && wm->interlaced))
2046 max_src_lines_per_dst_line = 4;
2048 max_src_lines_per_dst_line = 2;
/* lb fill rate is capped by both per-head bandwidth and disp_clk * bpp */
2050 a.full = dfixed_const(available_bandwidth);
2051 b.full = dfixed_const(wm->num_heads);
2052 a.full = dfixed_div(a, b);
2054 b.full = dfixed_const(1000);
2055 c.full = dfixed_const(wm->disp_clk);
2056 b.full = dfixed_div(c, b);
2057 c.full = dfixed_const(wm->bytes_per_pixel);
2058 b.full = dfixed_mul(b, c);
2060 lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
/* time to fill the needed source lines at that rate */
2062 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2063 b.full = dfixed_const(1000);
2064 c.full = dfixed_const(lb_fill_bw);
2065 b.full = dfixed_div(c, b);
2066 a.full = dfixed_div(a, b);
2067 line_fill_time = dfixed_trunc(a);
/* if filling takes longer than the active period, add the overrun */
2069 if (line_fill_time < wm->active_time)
2072 return latency + (line_fill_time - wm->active_time);
/* true when this head's average bandwidth fits in its per-head share of
 * the display-allocated DRAM bandwidth */
2076 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2078 if (evergreen_average_bandwidth(wm) <=
2079 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
/* true when this head's average bandwidth fits in its per-head share of
 * the overall available bandwidth */
2085 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2087 if (evergreen_average_bandwidth(wm) <=
2088 (evergreen_available_bandwidth(wm) / wm->num_heads))
/*
 * evergreen_check_latency_hiding - true when the line buffer can absorb
 * the computed latency watermark for this mode.
 */
2094 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2096 u32 lb_partitions = wm->lb_size / wm->src_width;
2097 u32 line_time = wm->active_time + wm->blank_time;
2098 u32 latency_tolerant_lines;
/* when downscaling (vsc > 1) or the lb holds barely more lines than the
 * scaler taps need, only one line of latency can be tolerated */
2102 a.full = dfixed_const(1);
2103 if (wm->vsc.full > a.full)
2104 latency_tolerant_lines = 1;
2106 if (lb_partitions <= (wm->vtaps + 1))
2107 latency_tolerant_lines = 1;
2109 latency_tolerant_lines = 2;
2112 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2114 if (evergreen_latency_watermark(wm) <= latency_hiding)
/*
 * evergreen_program_watermarks - program display watermarks for one CRTC.
 *
 * Computes high-clock and low-clock watermark parameter sets from the
 * CRTC's current mode, derives latency watermarks and priority marks,
 * then writes them to the per-pipe arbitration/latency registers and
 * the per-CRTC PRIORITY_A/B_CNT registers.  The computed line time and
 * watermarks are cached on the radeon_crtc for later use by DPM.
 *
 * NOTE(review): this extraction has dropped interleaved lines (else
 * branches, closing braces, some declarations) — compare against the
 * upstream file before editing logic here.
 */
2120 static void evergreen_program_watermarks(struct radeon_device *rdev,
2121 struct radeon_crtc *radeon_crtc,
2122 u32 lb_size, u32 num_heads)
2124 struct drm_display_mode *mode = &radeon_crtc->base.mode;
2125 struct evergreen_wm_params wm_low, wm_high;
2129 u32 latency_watermark_a = 0, latency_watermark_b = 0;
2130 u32 priority_a_mark = 0, priority_b_mark = 0;
2131 u32 priority_a_cnt = PRIORITY_OFF;
2132 u32 priority_b_cnt = PRIORITY_OFF;
2133 u32 pipe_offset = radeon_crtc->crtc_id * 16;
2134 u32 tmp, arb_control3;
/* Only program watermarks when this CRTC is actually driving a mode. */
2137 if (radeon_crtc->base.enabled && num_heads && mode) {
/* pixel period in ns; mode->clock is in kHz */
2138 pixel_period = 1000000 / (u32)mode->clock;
/* line time clamped to the 16-bit field the hardware accepts */
2139 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2142 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2144 /* watermark for high clocks */
/* With DPM active, use the DPM-reported clocks; otherwise fall back
 * to the current static clocks (both converted to 10 kHz units). */
2145 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2147 radeon_dpm_get_mclk(rdev, false) * 10;
2149 radeon_dpm_get_sclk(rdev, false) * 10;
2151 wm_high.yclk = rdev->pm.current_mclk * 10;
2152 wm_high.sclk = rdev->pm.current_sclk * 10;
2155 wm_high.disp_clk = mode->clock;
2156 wm_high.src_width = mode->crtc_hdisplay;
2157 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2158 wm_high.blank_time = line_time - wm_high.active_time;
2159 wm_high.interlaced = false;
2160 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2161 wm_high.interlaced = true;
2162 wm_high.vsc = radeon_crtc->vsc;
2164 if (radeon_crtc->rmx_type != RMX_OFF)
2166 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2167 wm_high.lb_size = lb_size;
2168 wm_high.dram_channels = dram_channels;
2169 wm_high.num_heads = num_heads;
2171 /* watermark for low clocks */
/* Same parameter set, but filled with the low-power clock values. */
2172 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2174 radeon_dpm_get_mclk(rdev, true) * 10;
2176 radeon_dpm_get_sclk(rdev, true) * 10;
2178 wm_low.yclk = rdev->pm.current_mclk * 10;
2179 wm_low.sclk = rdev->pm.current_sclk * 10;
2182 wm_low.disp_clk = mode->clock;
2183 wm_low.src_width = mode->crtc_hdisplay;
2184 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2185 wm_low.blank_time = line_time - wm_low.active_time;
2186 wm_low.interlaced = false;
2187 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2188 wm_low.interlaced = true;
2189 wm_low.vsc = radeon_crtc->vsc;
2191 if (radeon_crtc->rmx_type != RMX_OFF)
2193 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2194 wm_low.lb_size = lb_size;
2195 wm_low.dram_channels = dram_channels;
2196 wm_low.num_heads = num_heads;
2198 /* set for high clocks */
2199 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2200 /* set for low clocks */
2201 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2203 /* possibly force display priority to high */
2204 /* should really do this at mode validation time... */
/* If either bandwidth check fails (or the user forced disp_priority=2),
 * keep display requests always-on so scanout is never starved. */
2205 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2206 !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2207 !evergreen_check_latency_hiding(&wm_high) ||
2208 (rdev->disp_priority == 2)) {
2209 DRM_DEBUG_KMS("force priority a to high\n");
2210 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2212 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2213 !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2214 !evergreen_check_latency_hiding(&wm_low) ||
2215 (rdev->disp_priority == 2)) {
2216 DRM_DEBUG_KMS("force priority b to high\n");
2217 priority_b_cnt |= PRIORITY_ALWAYS_ON;
/* priority_a_mark = latency_watermark_a * (disp clk / 1000) * hsc / 16,
 * done in 20.12 fixed point via the dfixed helpers. */
2220 a.full = dfixed_const(1000);
2221 b.full = dfixed_const(mode->clock);
2222 b.full = dfixed_div(b, a);
2223 c.full = dfixed_const(latency_watermark_a);
2224 c.full = dfixed_mul(c, b);
2225 c.full = dfixed_mul(c, radeon_crtc->hsc);
2226 c.full = dfixed_div(c, a);
2227 a.full = dfixed_const(16);
2228 c.full = dfixed_div(c, a);
2229 priority_a_mark = dfixed_trunc(c);
2230 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
/* same computation for the low-clock watermark */
2232 a.full = dfixed_const(1000);
2233 b.full = dfixed_const(mode->clock);
2234 b.full = dfixed_div(b, a);
2235 c.full = dfixed_const(latency_watermark_b);
2236 c.full = dfixed_mul(c, b);
2237 c.full = dfixed_mul(c, radeon_crtc->hsc);
2238 c.full = dfixed_div(c, a);
2239 a.full = dfixed_const(16);
2240 c.full = dfixed_div(c, a);
2241 priority_b_mark = dfixed_trunc(c);
2242 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
/* Select watermark set 1, program the high-clock values... */
2246 arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2248 tmp &= ~LATENCY_WATERMARK_MASK(3);
2249 tmp |= LATENCY_WATERMARK_MASK(1);
2250 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2251 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2252 (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2253 LATENCY_HIGH_WATERMARK(line_time)));
/* ...then select watermark set 2 and program the low-clock values. */
2255 tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2256 tmp &= ~LATENCY_WATERMARK_MASK(3);
2257 tmp |= LATENCY_WATERMARK_MASK(2);
2258 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2259 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2260 (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2261 LATENCY_HIGH_WATERMARK(line_time)));
2262 /* restore original selection */
2263 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2265 /* write the priority marks */
2266 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2267 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2269 /* save values for DPM */
2270 radeon_crtc->line_time = line_time;
2271 radeon_crtc->wm_high = latency_watermark_a;
2272 radeon_crtc->wm_low = latency_watermark_b;
2276 * evergreen_bandwidth_update - update display watermarks callback.
2278 * @rdev: radeon_device pointer
2280 * Update the display watermarks based on the requested mode(s) (evergreen+).
/*
 * Walks the CRTCs in pairs (each pair shares a line buffer), splits the
 * line buffer between the two heads, then programs watermarks for each.
 * NOTE(review): loop bodies are missing lines (num_heads accounting,
 * braces) in this extraction — consult upstream before changing.
 */
2283 void evergreen_bandwidth_update(struct radeon_device *rdev)
2285 struct drm_display_mode *mode0 = NULL;
2286 struct drm_display_mode *mode1 = NULL;
2287 u32 num_heads = 0, lb_size;
2290 radeon_update_display_priority(rdev);
/* count active heads */
2292 for (i = 0; i < rdev->num_crtc; i++) {
2293 if (rdev->mode_info.crtcs[i]->base.enabled)
/* process CRTCs two at a time: each pair shares one line buffer */
2296 for (i = 0; i < rdev->num_crtc; i += 2) {
2297 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2298 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2299 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2300 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2301 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2302 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2307 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2309 * @rdev: radeon_device pointer
2311 * Wait for the MC (memory controller) to be idle.
2313 * Returns 0 if the MC is idle, -1 if not.
/*
 * Polls SRBM_STATUS (MC busy bits, mask 0x1F00) up to usec_timeout
 * iterations.  NOTE(review): the success/timeout return paths were
 * dropped by this extraction.
 */
2315 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2320 for (i = 0; i < rdev->usec_timeout; i++) {
2321 /* read MC_STATUS */
2322 tmp = RREG32(SRBM_STATUS) & 0x1F00;
/*
 * Flush the HDP cache and the VM context 0 TLB, then poll the
 * request/response register until the flush is acknowledged.
 * NOTE(review): the poll-success break and delay lines are missing
 * from this extraction.
 */
2333 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
/* flush host data path (HDP) cache */
2338 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
/* request a context-0 TLB flush */
2340 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2341 for (i = 0; i < rdev->usec_timeout; i++) {
2342 /* read MC_STATUS */
2343 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2344 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2346 DRM_ERROR("[drm] r600 flush TLB failed\n");
/*
 * Enable the PCIE GART: pin the page table in VRAM, configure the VM
 * L2 cache and L1 TLBs, point VM context 0 at the GTT range and the
 * page table, then flush the TLB and mark the GART ready.
 * NOTE(review): error returns, the IGP else-branch brace, and the
 * final return were dropped by this extraction.
 */
2356 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2361 if (rdev->gart.robj == NULL) {
2362 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2365 r = radeon_gart_table_vram_pin(rdev);
2368 radeon_gart_restore(rdev);
2369 /* Setup L2 cache */
2370 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2371 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2372 EFFECTIVE_L2_QUEUE_SIZE(7));
2373 WREG32(VM_L2_CNTL2, 0);
2374 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2375 /* Setup TLB control */
2376 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2377 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2378 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2379 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
/* IGPs (fusion) use the FUS_ register set for the MD L1 TLBs */
2380 if (rdev->flags & RADEON_IS_IGP) {
2381 WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2382 WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2383 WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2385 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2386 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2387 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
/* these discrete parts have a fourth MD L1 TLB */
2388 if ((rdev->family == CHIP_JUNIPER) ||
2389 (rdev->family == CHIP_CYPRESS) ||
2390 (rdev->family == CHIP_HEMLOCK) ||
2391 (rdev->family == CHIP_BARTS))
2392 WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2394 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2395 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2396 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2397 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
/* map VM context 0 over the GTT aperture, backed by the GART table */
2398 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2399 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2400 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2401 WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2402 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
/* out-of-range accesses fault to the dummy page */
2403 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2404 (u32)(rdev->dummy_page.addr >> 12));
2405 WREG32(VM_CONTEXT1_CNTL, 0);
2407 evergreen_pcie_gart_tlb_flush(rdev);
2408 DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2409 (unsigned)(rdev->mc.gtt_size >> 20),
2410 (unsigned long long)rdev->gart.table_addr);
2411 rdev->gart.ready = true;
/*
 * Disable the PCIE GART: turn off both VM contexts, reprogram the L2
 * cache without ENABLE_L2_CACHE, disable the L1 TLBs, then unpin the
 * page table from VRAM.
 */
2415 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2419 /* Disable all tables */
2420 WREG32(VM_CONTEXT0_CNTL, 0);
2421 WREG32(VM_CONTEXT1_CNTL, 0);
2423 /* Setup L2 cache */
2424 WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2425 EFFECTIVE_L2_QUEUE_SIZE(7));
2426 WREG32(VM_L2_CNTL2, 0);
2427 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2428 /* Setup TLB control */
/* no ENABLE_L1_TLB bit — TLBs are left disabled */
2429 tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2430 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2431 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2432 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2433 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2434 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2435 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2436 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2437 radeon_gart_table_vram_unpin(rdev);
/*
 * Tear down the GART completely: disable it, free the VRAM-resident
 * page table, then release the generic GART state.
 */
2440 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2442 evergreen_pcie_gart_disable(rdev);
2443 radeon_gart_table_vram_free(rdev);
2444 radeon_gart_fini(rdev);
/*
 * Configure the VM L2/L1 caches for AGP operation and disable both VM
 * contexts (AGP does not use the page-table-backed GART contexts).
 */
2448 static void evergreen_agp_enable(struct radeon_device *rdev)
2452 /* Setup L2 cache */
2453 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2454 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2455 EFFECTIVE_L2_QUEUE_SIZE(7));
2456 WREG32(VM_L2_CNTL2, 0);
2457 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2458 /* Setup TLB control */
2459 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2460 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2461 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2462 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2463 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2464 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2465 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2466 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2467 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2468 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2469 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
/* no VM contexts in AGP mode */
2470 WREG32(VM_CONTEXT0_CNTL, 0);
2471 WREG32(VM_CONTEXT1_CNTL, 0);
/*
 * evergreen_mc_stop - quiesce display before reprogramming the MC.
 *
 * Saves VGA state, disables the VGA renderer, blanks/disables each
 * enabled CRTC (waiting a frame for the change to latch), waits for
 * the MC to go idle, blacks out MC client access, and locks the
 * double-buffered display registers.  State needed by
 * evergreen_mc_resume() is recorded in @save.
 * NOTE(review): else branches, braces, and delay lines were dropped
 * by this extraction.
 */
2474 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2476 u32 crtc_enabled, tmp, frame_count, blackout;
2479 if (!ASIC_IS_NODCE(rdev)) {
2480 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2481 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2483 /* disable VGA render */
2484 WREG32(VGA_RENDER_CONTROL, 0);
2486 /* blank the display controllers */
2487 for (i = 0; i < rdev->num_crtc; i++) {
2488 crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2490 save->crtc_enabled[i] = true;
/* DCE6+ blanks via BLANK_CONTROL; older parts disable read requests */
2491 if (ASIC_IS_DCE6(rdev)) {
2492 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2493 if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2494 radeon_wait_for_vblank(rdev, i);
2495 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2496 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2497 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2500 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2501 if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2502 radeon_wait_for_vblank(rdev, i);
2503 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2504 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2505 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2506 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2509 /* wait for the next frame */
2510 frame_count = radeon_get_vblank_counter(rdev, i);
2511 for (j = 0; j < rdev->usec_timeout; j++) {
2512 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2517 /* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2518 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2519 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2520 tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2521 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2522 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2523 save->crtc_enabled[i] = false;
2526 save->crtc_enabled[i] = false;
2530 radeon_mc_wait_for_idle(rdev);
2532 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2533 if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2534 /* Block CPU access */
2535 WREG32(BIF_FB_EN, 0);
2536 /* blackout the MC */
2537 blackout &= ~BLACKOUT_MODE_MASK;
2538 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2540 /* wait for the MC to settle */
2543 /* lock double buffered regs */
2544 for (i = 0; i < rdev->num_crtc; i++) {
2545 if (save->crtc_enabled[i]) {
2546 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2547 if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2548 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2549 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2551 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2554 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
/*
 * evergreen_mc_resume - restore display after MC reprogramming.
 *
 * Repoints every CRTC's scanout surfaces at the (possibly relocated)
 * start of VRAM, unlocks the double-buffered registers and waits for
 * the pending surface update to complete, lifts the MC blackout,
 * re-enables/unblanks the CRTCs saved by evergreen_mc_stop(), and
 * restores VGA state.
 * NOTE(review): braces, delays, and some masking lines were dropped
 * by this extraction.
 */
2560 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2562 u32 tmp, frame_count;
2565 /* update crtc base addresses */
2566 for (i = 0; i < rdev->num_crtc; i++) {
2567 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2568 upper_32_bits(rdev->mc.vram_start));
2569 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2570 upper_32_bits(rdev->mc.vram_start));
2571 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2572 (u32)rdev->mc.vram_start);
2573 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2574 (u32)rdev->mc.vram_start);
2577 if (!ASIC_IS_NODCE(rdev)) {
2578 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2579 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2582 /* unlock regs and wait for update */
2583 for (i = 0; i < rdev->num_crtc; i++) {
2584 if (save->crtc_enabled[i]) {
2585 tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2586 if ((tmp & 0x3) != 0) {
2588 WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2590 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2591 if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2592 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2593 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2595 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2598 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
/* wait for the surface-update-pending bit to clear */
2600 for (j = 0; j < rdev->usec_timeout; j++) {
2601 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2602 if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2609 /* unblackout the MC */
2610 tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2611 tmp &= ~BLACKOUT_MODE_MASK;
2612 WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2613 /* allow CPU access */
2614 WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2616 for (i = 0; i < rdev->num_crtc; i++) {
2617 if (save->crtc_enabled[i]) {
/* DCE6+ unblanks; older parts re-enable display read requests */
2618 if (ASIC_IS_DCE6(rdev)) {
2619 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2620 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2621 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2622 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2623 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2625 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2626 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2627 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2628 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2629 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2631 /* wait for the next frame */
2632 frame_count = radeon_get_vblank_counter(rdev, i);
2633 for (j = 0; j < rdev->usec_timeout; j++) {
2634 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2640 if (!ASIC_IS_NODCE(rdev)) {
2641 /* Unlock vga access */
2642 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2644 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
/*
 * evergreen_mc_program - program the memory controller apertures.
 *
 * Zeroes the HDP tiling registers, stops display access via
 * evergreen_mc_stop(), programs the system/VRAM/AGP aperture ranges
 * and the FB location, then resumes display and disables the VGA
 * renderer so it cannot scribble over driver-owned VRAM.
 * NOTE(review): else lines and closing braces were dropped by this
 * extraction.
 */
2648 void evergreen_mc_program(struct radeon_device *rdev)
2650 struct evergreen_mc_save save;
2654 /* Initialize HDP */
2655 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2656 WREG32((0x2c14 + j), 0x00000000);
2657 WREG32((0x2c18 + j), 0x00000000);
2658 WREG32((0x2c1c + j), 0x00000000);
2659 WREG32((0x2c20 + j), 0x00000000);
2660 WREG32((0x2c24 + j), 0x00000000);
2662 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2664 evergreen_mc_stop(rdev, &save);
2665 if (evergreen_mc_wait_for_idle(rdev)) {
2666 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2668 /* Lockout access through VGA aperture*/
2669 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2670 /* Update configuration */
/* the system aperture must span both VRAM and GTT on AGP systems */
2671 if (rdev->flags & RADEON_IS_AGP) {
2672 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2673 /* VRAM before AGP */
2674 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2675 rdev->mc.vram_start >> 12);
2676 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2677 rdev->mc.gtt_end >> 12);
2679 /* VRAM after AGP */
2680 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2681 rdev->mc.gtt_start >> 12);
2682 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2683 rdev->mc.vram_end >> 12);
2686 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2687 rdev->mc.vram_start >> 12);
2688 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2689 rdev->mc.vram_end >> 12);
2691 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2692 /* llano/ontario only */
2693 if ((rdev->family == CHIP_PALM) ||
2694 (rdev->family == CHIP_SUMO) ||
2695 (rdev->family == CHIP_SUMO2)) {
2696 tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2697 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2698 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2699 WREG32(MC_FUS_VM_FB_OFFSET, tmp);
/* FB location: end in high 16 bits, start in low 16, 16 MB units */
2701 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2702 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2703 WREG32(MC_VM_FB_LOCATION, tmp);
2704 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2705 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2706 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2707 if (rdev->flags & RADEON_IS_AGP) {
2708 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2709 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2710 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
2712 WREG32(MC_VM_AGP_BASE, 0);
2713 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2714 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2716 if (evergreen_mc_wait_for_idle(rdev)) {
2717 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2719 evergreen_mc_resume(rdev, &save);
2720 /* we need to own VRAM, so turn off the VGA renderer here
2721 * to stop it overwriting our objects */
2722 rv515_vga_render_disable(rdev);
/*
 * evergreen_ring_ib_execute - schedule an indirect buffer on the GFX ring.
 *
 * Switches the CP to DX10/11 mode, optionally emits a write of the
 * predicted read pointer (to the rptr save register or the writeback
 * buffer), then emits the INDIRECT_BUFFER packet pointing at @ib.
 * NOTE(review): braces and the IB flags line were dropped by this
 * extraction.
 */
2728 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2730 struct radeon_ring *ring = &rdev->ring[ib->ring];
2733 /* set to DX10/11 mode */
2734 radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
2735 radeon_ring_write(ring, 1);
2737 if (ring->rptr_save_reg) {
/* 3 dwords for this packet + 4 for the IB packet below */
2738 next_rptr = ring->wptr + 3 + 4;
2739 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2740 radeon_ring_write(ring, ((ring->rptr_save_reg -
2741 PACKET3_SET_CONFIG_REG_START) >> 2));
2742 radeon_ring_write(ring, next_rptr);
2743 } else if (rdev->wb.enabled) {
2744 next_rptr = ring->wptr + 5 + 4;
2745 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2746 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
2747 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2748 radeon_ring_write(ring, next_rptr);
2749 radeon_ring_write(ring, 0);
2752 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2753 radeon_ring_write(ring,
2757 (ib->gpu_addr & 0xFFFFFFFC));
2758 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
2759 radeon_ring_write(ring, ib->length_dw);
/*
 * evergreen_cp_load_microcode - upload PFP and ME microcode to the CP.
 *
 * Requires rdev->pfp_fw and rdev->me_fw to have been fetched already.
 * Firmware words are stored big-endian and byte-swapped on write.
 * NOTE(review): the error return and final return 0 were dropped by
 * this extraction.
 */
2763 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2765 const __be32 *fw_data;
2768 if (!rdev->me_fw || !rdev->pfp_fw)
2776 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
/* upload prefetch parser (PFP) ucode */
2778 fw_data = (const __be32 *)rdev->pfp_fw->data;
2779 WREG32(CP_PFP_UCODE_ADDR, 0);
2780 for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2781 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2782 WREG32(CP_PFP_UCODE_ADDR, 0);
/* upload micro engine (ME) ucode */
2784 fw_data = (const __be32 *)rdev->me_fw->data;
2785 WREG32(CP_ME_RAM_WADDR, 0);
2786 for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2787 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
2789 WREG32(CP_PFP_UCODE_ADDR, 0);
2790 WREG32(CP_ME_RAM_WADDR, 0);
2791 WREG32(CP_ME_RAM_RADDR, 0);
/*
 * evergreen_cp_start - initialize the CP and emit the clear state.
 *
 * Emits ME_INITIALIZE, un-halts the CP, then emits the golden clear
 * context state (evergreen_default_state) followed by a few fixed
 * register packets.
 * NOTE(review): error-return lines and some packet lines were dropped
 * by this extraction.
 */
2795 static int evergreen_cp_start(struct radeon_device *rdev)
2797 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2801 r = radeon_ring_lock(rdev, ring, 7);
2803 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2806 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2807 radeon_ring_write(ring, 0x1);
2808 radeon_ring_write(ring, 0x0);
2809 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
2810 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2811 radeon_ring_write(ring, 0);
2812 radeon_ring_write(ring, 0);
2813 radeon_ring_unlock_commit(rdev, ring);
2816 WREG32(CP_ME_CNTL, cp_me);
/* clear state stream plus 19 dwords of fixed packets */
2818 r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
2820 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2824 /* setup clear context state */
2825 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2826 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
2828 for (i = 0; i < evergreen_default_size; i++)
2829 radeon_ring_write(ring, evergreen_default_state[i]);
2831 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2832 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
2834 /* set clear context state */
2835 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
2836 radeon_ring_write(ring, 0);
2838 /* SQ_VTX_BASE_VTX_LOC */
2839 radeon_ring_write(ring, 0xc0026f00);
2840 radeon_ring_write(ring, 0x00000000);
2841 radeon_ring_write(ring, 0x00000000);
2842 radeon_ring_write(ring, 0x00000000);
2845 radeon_ring_write(ring, 0xc0036f00);
2846 radeon_ring_write(ring, 0x00000bc4);
2847 radeon_ring_write(ring, 0xffffffff);
2848 radeon_ring_write(ring, 0xffffffff);
2849 radeon_ring_write(ring, 0xffffffff);
2851 radeon_ring_write(ring, 0xc0026900);
2852 radeon_ring_write(ring, 0x00000316);
2853 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
2854 radeon_ring_write(ring, 0x00000010); /* */
2856 radeon_ring_unlock_commit(rdev, ring);
/*
 * evergreen_cp_resume - bring the command processor ring online.
 *
 * Soft-resets the CP, programs the ring buffer size/pointers/writeback
 * address, re-enables the CP via evergreen_cp_start(), and ring-tests
 * the GFX ring.
 * NOTE(review): the soft-reset bit list, delays, and return paths were
 * dropped by this extraction.
 */
2861 static int evergreen_cp_resume(struct radeon_device *rdev)
2863 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2868 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
2869 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
/* read back to post the reset, then release it */
2875 RREG32(GRBM_SOFT_RESET);
2877 WREG32(GRBM_SOFT_RESET, 0);
2878 RREG32(GRBM_SOFT_RESET);
2880 /* Set ring buffer size */
2881 rb_bufsz = drm_order(ring->ring_size / 8);
2882 tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
2884 tmp |= BUF_SWAP_32BIT;
2886 WREG32(CP_RB_CNTL, tmp);
2887 WREG32(CP_SEM_WAIT_TIMER, 0x0);
2888 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
2890 /* Set the write pointer delay */
2891 WREG32(CP_RB_WPTR_DELAY, 0);
2893 /* Initialize the ring buffer's read and write pointers */
2894 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2895 WREG32(CP_RB_RPTR_WR, 0);
2897 WREG32(CP_RB_WPTR, ring->wptr);
2899 /* set the wb address whether it's enabled or not */
2900 WREG32(CP_RB_RPTR_ADDR,
2901 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2902 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2903 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2905 if (rdev->wb.enabled)
2906 WREG32(SCRATCH_UMSK, 0xff);
/* without writeback the CP must not update the rptr in memory */
2908 tmp |= RB_NO_UPDATE;
2909 WREG32(SCRATCH_UMSK, 0);
2913 WREG32(CP_RB_CNTL, tmp);
2915 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2916 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2918 ring->rptr = RREG32(CP_RB_RPTR);
2920 evergreen_cp_start(rdev);
2922 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
2924 ring->ready = false;
2933 static void evergreen_gpu_init(struct radeon_device *rdev)
2936 u32 mc_shared_chmap, mc_arb_ramcfg;
2940 u32 sq_lds_resource_mgmt;
2941 u32 sq_gpr_resource_mgmt_1;
2942 u32 sq_gpr_resource_mgmt_2;
2943 u32 sq_gpr_resource_mgmt_3;
2944 u32 sq_thread_resource_mgmt;
2945 u32 sq_thread_resource_mgmt_2;
2946 u32 sq_stack_resource_mgmt_1;
2947 u32 sq_stack_resource_mgmt_2;
2948 u32 sq_stack_resource_mgmt_3;
2949 u32 vgt_cache_invalidation;
2950 u32 hdp_host_path_cntl, tmp;
2951 u32 disabled_rb_mask;
2952 int i, j, num_shader_engines, ps_thread_count;
2954 switch (rdev->family) {
2957 rdev->config.evergreen.num_ses = 2;
2958 rdev->config.evergreen.max_pipes = 4;
2959 rdev->config.evergreen.max_tile_pipes = 8;
2960 rdev->config.evergreen.max_simds = 10;
2961 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2962 rdev->config.evergreen.max_gprs = 256;
2963 rdev->config.evergreen.max_threads = 248;
2964 rdev->config.evergreen.max_gs_threads = 32;
2965 rdev->config.evergreen.max_stack_entries = 512;
2966 rdev->config.evergreen.sx_num_of_sets = 4;
2967 rdev->config.evergreen.sx_max_export_size = 256;
2968 rdev->config.evergreen.sx_max_export_pos_size = 64;
2969 rdev->config.evergreen.sx_max_export_smx_size = 192;
2970 rdev->config.evergreen.max_hw_contexts = 8;
2971 rdev->config.evergreen.sq_num_cf_insts = 2;
2973 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2974 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2975 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2976 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
2979 rdev->config.evergreen.num_ses = 1;
2980 rdev->config.evergreen.max_pipes = 4;
2981 rdev->config.evergreen.max_tile_pipes = 4;
2982 rdev->config.evergreen.max_simds = 10;
2983 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2984 rdev->config.evergreen.max_gprs = 256;
2985 rdev->config.evergreen.max_threads = 248;
2986 rdev->config.evergreen.max_gs_threads = 32;
2987 rdev->config.evergreen.max_stack_entries = 512;
2988 rdev->config.evergreen.sx_num_of_sets = 4;
2989 rdev->config.evergreen.sx_max_export_size = 256;
2990 rdev->config.evergreen.sx_max_export_pos_size = 64;
2991 rdev->config.evergreen.sx_max_export_smx_size = 192;
2992 rdev->config.evergreen.max_hw_contexts = 8;
2993 rdev->config.evergreen.sq_num_cf_insts = 2;
2995 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2996 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2997 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2998 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3001 rdev->config.evergreen.num_ses = 1;
3002 rdev->config.evergreen.max_pipes = 4;
3003 rdev->config.evergreen.max_tile_pipes = 4;
3004 rdev->config.evergreen.max_simds = 5;
3005 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3006 rdev->config.evergreen.max_gprs = 256;
3007 rdev->config.evergreen.max_threads = 248;
3008 rdev->config.evergreen.max_gs_threads = 32;
3009 rdev->config.evergreen.max_stack_entries = 256;
3010 rdev->config.evergreen.sx_num_of_sets = 4;
3011 rdev->config.evergreen.sx_max_export_size = 256;
3012 rdev->config.evergreen.sx_max_export_pos_size = 64;
3013 rdev->config.evergreen.sx_max_export_smx_size = 192;
3014 rdev->config.evergreen.max_hw_contexts = 8;
3015 rdev->config.evergreen.sq_num_cf_insts = 2;
3017 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3018 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3019 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3020 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3024 rdev->config.evergreen.num_ses = 1;
3025 rdev->config.evergreen.max_pipes = 2;
3026 rdev->config.evergreen.max_tile_pipes = 2;
3027 rdev->config.evergreen.max_simds = 2;
3028 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3029 rdev->config.evergreen.max_gprs = 256;
3030 rdev->config.evergreen.max_threads = 192;
3031 rdev->config.evergreen.max_gs_threads = 16;
3032 rdev->config.evergreen.max_stack_entries = 256;
3033 rdev->config.evergreen.sx_num_of_sets = 4;
3034 rdev->config.evergreen.sx_max_export_size = 128;
3035 rdev->config.evergreen.sx_max_export_pos_size = 32;
3036 rdev->config.evergreen.sx_max_export_smx_size = 96;
3037 rdev->config.evergreen.max_hw_contexts = 4;
3038 rdev->config.evergreen.sq_num_cf_insts = 1;
3040 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3041 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3042 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3043 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3046 rdev->config.evergreen.num_ses = 1;
3047 rdev->config.evergreen.max_pipes = 2;
3048 rdev->config.evergreen.max_tile_pipes = 2;
3049 rdev->config.evergreen.max_simds = 2;
3050 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3051 rdev->config.evergreen.max_gprs = 256;
3052 rdev->config.evergreen.max_threads = 192;
3053 rdev->config.evergreen.max_gs_threads = 16;
3054 rdev->config.evergreen.max_stack_entries = 256;
3055 rdev->config.evergreen.sx_num_of_sets = 4;
3056 rdev->config.evergreen.sx_max_export_size = 128;
3057 rdev->config.evergreen.sx_max_export_pos_size = 32;
3058 rdev->config.evergreen.sx_max_export_smx_size = 96;
3059 rdev->config.evergreen.max_hw_contexts = 4;
3060 rdev->config.evergreen.sq_num_cf_insts = 1;
3062 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3063 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3064 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3065 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3068 rdev->config.evergreen.num_ses = 1;
3069 rdev->config.evergreen.max_pipes = 4;
3070 rdev->config.evergreen.max_tile_pipes = 4;
3071 if (rdev->ddev->pci_device == 0x9648)
3072 rdev->config.evergreen.max_simds = 3;
3073 else if ((rdev->ddev->pci_device == 0x9647) ||
3074 (rdev->ddev->pci_device == 0x964a))
3075 rdev->config.evergreen.max_simds = 4;
3077 rdev->config.evergreen.max_simds = 5;
3078 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3079 rdev->config.evergreen.max_gprs = 256;
3080 rdev->config.evergreen.max_threads = 248;
3081 rdev->config.evergreen.max_gs_threads = 32;
3082 rdev->config.evergreen.max_stack_entries = 256;
3083 rdev->config.evergreen.sx_num_of_sets = 4;
3084 rdev->config.evergreen.sx_max_export_size = 256;
3085 rdev->config.evergreen.sx_max_export_pos_size = 64;
3086 rdev->config.evergreen.sx_max_export_smx_size = 192;
3087 rdev->config.evergreen.max_hw_contexts = 8;
3088 rdev->config.evergreen.sq_num_cf_insts = 2;
3090 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3091 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3092 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3093 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3096 rdev->config.evergreen.num_ses = 1;
3097 rdev->config.evergreen.max_pipes = 4;
3098 rdev->config.evergreen.max_tile_pipes = 4;
3099 rdev->config.evergreen.max_simds = 2;
3100 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3101 rdev->config.evergreen.max_gprs = 256;
3102 rdev->config.evergreen.max_threads = 248;
3103 rdev->config.evergreen.max_gs_threads = 32;
3104 rdev->config.evergreen.max_stack_entries = 512;
3105 rdev->config.evergreen.sx_num_of_sets = 4;
3106 rdev->config.evergreen.sx_max_export_size = 256;
3107 rdev->config.evergreen.sx_max_export_pos_size = 64;
3108 rdev->config.evergreen.sx_max_export_smx_size = 192;
3109 rdev->config.evergreen.max_hw_contexts = 8;
3110 rdev->config.evergreen.sq_num_cf_insts = 2;
3112 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3113 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3114 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3115 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3118 rdev->config.evergreen.num_ses = 2;
3119 rdev->config.evergreen.max_pipes = 4;
3120 rdev->config.evergreen.max_tile_pipes = 8;
3121 rdev->config.evergreen.max_simds = 7;
3122 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3123 rdev->config.evergreen.max_gprs = 256;
3124 rdev->config.evergreen.max_threads = 248;
3125 rdev->config.evergreen.max_gs_threads = 32;
3126 rdev->config.evergreen.max_stack_entries = 512;
3127 rdev->config.evergreen.sx_num_of_sets = 4;
3128 rdev->config.evergreen.sx_max_export_size = 256;
3129 rdev->config.evergreen.sx_max_export_pos_size = 64;
3130 rdev->config.evergreen.sx_max_export_smx_size = 192;
3131 rdev->config.evergreen.max_hw_contexts = 8;
3132 rdev->config.evergreen.sq_num_cf_insts = 2;
3134 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3135 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3136 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3137 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3140 rdev->config.evergreen.num_ses = 1;
3141 rdev->config.evergreen.max_pipes = 4;
3142 rdev->config.evergreen.max_tile_pipes = 4;
3143 rdev->config.evergreen.max_simds = 6;
3144 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3145 rdev->config.evergreen.max_gprs = 256;
3146 rdev->config.evergreen.max_threads = 248;
3147 rdev->config.evergreen.max_gs_threads = 32;
3148 rdev->config.evergreen.max_stack_entries = 256;
3149 rdev->config.evergreen.sx_num_of_sets = 4;
3150 rdev->config.evergreen.sx_max_export_size = 256;
3151 rdev->config.evergreen.sx_max_export_pos_size = 64;
3152 rdev->config.evergreen.sx_max_export_smx_size = 192;
3153 rdev->config.evergreen.max_hw_contexts = 8;
3154 rdev->config.evergreen.sq_num_cf_insts = 2;
3156 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3157 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3158 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3159 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3162 rdev->config.evergreen.num_ses = 1;
3163 rdev->config.evergreen.max_pipes = 2;
3164 rdev->config.evergreen.max_tile_pipes = 2;
3165 rdev->config.evergreen.max_simds = 2;
3166 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3167 rdev->config.evergreen.max_gprs = 256;
3168 rdev->config.evergreen.max_threads = 192;
3169 rdev->config.evergreen.max_gs_threads = 16;
3170 rdev->config.evergreen.max_stack_entries = 256;
3171 rdev->config.evergreen.sx_num_of_sets = 4;
3172 rdev->config.evergreen.sx_max_export_size = 128;
3173 rdev->config.evergreen.sx_max_export_pos_size = 32;
3174 rdev->config.evergreen.sx_max_export_smx_size = 96;
3175 rdev->config.evergreen.max_hw_contexts = 4;
3176 rdev->config.evergreen.sq_num_cf_insts = 1;
3178 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3179 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3180 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3181 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3185 /* Initialize HDP */
3186 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3187 WREG32((0x2c14 + j), 0x00000000);
3188 WREG32((0x2c18 + j), 0x00000000);
3189 WREG32((0x2c1c + j), 0x00000000);
3190 WREG32((0x2c20 + j), 0x00000000);
3191 WREG32((0x2c24 + j), 0x00000000);
3194 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3196 evergreen_fix_pci_max_read_req_size(rdev);
3198 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3199 if ((rdev->family == CHIP_PALM) ||
3200 (rdev->family == CHIP_SUMO) ||
3201 (rdev->family == CHIP_SUMO2))
3202 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3204 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3206 /* setup tiling info dword. gb_addr_config is not adequate since it does
3207 * not have bank info, so create a custom tiling dword.
3208 * bits 3:0 num_pipes
3209 * bits 7:4 num_banks
3210 * bits 11:8 group_size
3211 * bits 15:12 row_size
3213 rdev->config.evergreen.tile_config = 0;
3214 switch (rdev->config.evergreen.max_tile_pipes) {
3217 rdev->config.evergreen.tile_config |= (0 << 0);
3220 rdev->config.evergreen.tile_config |= (1 << 0);
3223 rdev->config.evergreen.tile_config |= (2 << 0);
3226 rdev->config.evergreen.tile_config |= (3 << 0);
3229 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3230 if (rdev->flags & RADEON_IS_IGP)
3231 rdev->config.evergreen.tile_config |= 1 << 4;
3233 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3234 case 0: /* four banks */
3235 rdev->config.evergreen.tile_config |= 0 << 4;
3237 case 1: /* eight banks */
3238 rdev->config.evergreen.tile_config |= 1 << 4;
3240 case 2: /* sixteen banks */
3242 rdev->config.evergreen.tile_config |= 2 << 4;
3246 rdev->config.evergreen.tile_config |= 0 << 8;
3247 rdev->config.evergreen.tile_config |=
3248 ((gb_addr_config & 0x30000000) >> 28) << 12;
3250 num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3252 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3256 efuse_straps_4 = RREG32_RCU(0x204);
3257 efuse_straps_3 = RREG32_RCU(0x203);
3258 tmp = (((efuse_straps_4 & 0xf) << 4) |
3259 ((efuse_straps_3 & 0xf0000000) >> 28));
3262 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3263 u32 rb_disable_bitmap;
3265 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3266 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3267 rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3269 tmp |= rb_disable_bitmap;
3272 /* enabled rb are just the one not disabled :) */
3273 disabled_rb_mask = tmp;
3275 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3277 /* if all the backends are disabled, fix it up here */
3278 if ((disabled_rb_mask & tmp) == tmp) {
3279 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3280 disabled_rb_mask &= ~(1 << i);
3283 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3284 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3286 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3287 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3288 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3289 WREG32(DMA_TILING_CONFIG, gb_addr_config);
3290 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3291 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3292 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3294 if ((rdev->config.evergreen.max_backends == 1) &&
3295 (rdev->flags & RADEON_IS_IGP)) {
3296 if ((disabled_rb_mask & 3) == 1) {
3297 /* RB0 disabled, RB1 enabled */
3300 /* RB1 disabled, RB0 enabled */
3304 tmp = gb_addr_config & NUM_PIPES_MASK;
3305 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3306 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3308 WREG32(GB_BACKEND_MAP, tmp);
3310 WREG32(CGTS_SYS_TCC_DISABLE, 0);
3311 WREG32(CGTS_TCC_DISABLE, 0);
3312 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3313 WREG32(CGTS_USER_TCC_DISABLE, 0);
3315 /* set HW defaults for 3D engine */
3316 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3317 ROQ_IB2_START(0x2b)));
3319 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3321 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3326 sx_debug_1 = RREG32(SX_DEBUG_1);
3327 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3328 WREG32(SX_DEBUG_1, sx_debug_1);
3331 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3332 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3333 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3334 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3336 if (rdev->family <= CHIP_SUMO2)
3337 WREG32(SMX_SAR_CTL0, 0x00010000);
3339 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3340 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3341 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3343 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3344 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3345 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3347 WREG32(VGT_NUM_INSTANCES, 1);
3348 WREG32(SPI_CONFIG_CNTL, 0);
3349 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3350 WREG32(CP_PERFMON_CNTL, 0);
3352 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3353 FETCH_FIFO_HIWATER(0x4) |
3354 DONE_FIFO_HIWATER(0xe0) |
3355 ALU_UPDATE_FIFO_HIWATER(0x8)));
3357 sq_config = RREG32(SQ_CONFIG);
3358 sq_config &= ~(PS_PRIO(3) |
3362 sq_config |= (VC_ENABLE |
3369 switch (rdev->family) {
3375 /* no vertex cache */
3376 sq_config &= ~VC_ENABLE;
3382 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3384 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3385 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3386 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3387 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3388 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3389 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3390 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3392 switch (rdev->family) {
3397 ps_thread_count = 96;
3400 ps_thread_count = 128;
3404 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3405 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3406 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3407 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3408 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3409 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3411 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3412 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3413 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3414 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3415 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3416 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3418 WREG32(SQ_CONFIG, sq_config);
3419 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3420 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3421 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3422 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3423 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3424 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3425 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3426 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3427 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3428 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3430 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3431 FORCE_EOV_MAX_REZ_CNT(255)));
3433 switch (rdev->family) {
3439 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3442 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3445 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3446 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3448 WREG32(VGT_GS_VERTEX_REUSE, 16);
3449 WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3450 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3452 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3453 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3455 WREG32(CB_PERF_CTR0_SEL_0, 0);
3456 WREG32(CB_PERF_CTR0_SEL_1, 0);
3457 WREG32(CB_PERF_CTR1_SEL_0, 0);
3458 WREG32(CB_PERF_CTR1_SEL_1, 0);
3459 WREG32(CB_PERF_CTR2_SEL_0, 0);
3460 WREG32(CB_PERF_CTR2_SEL_1, 0);
3461 WREG32(CB_PERF_CTR3_SEL_0, 0);
3462 WREG32(CB_PERF_CTR3_SEL_1, 0);
3464 /* clear render buffer base addresses */
3465 WREG32(CB_COLOR0_BASE, 0);
3466 WREG32(CB_COLOR1_BASE, 0);
3467 WREG32(CB_COLOR2_BASE, 0);
3468 WREG32(CB_COLOR3_BASE, 0);
3469 WREG32(CB_COLOR4_BASE, 0);
3470 WREG32(CB_COLOR5_BASE, 0);
3471 WREG32(CB_COLOR6_BASE, 0);
3472 WREG32(CB_COLOR7_BASE, 0);
3473 WREG32(CB_COLOR8_BASE, 0);
3474 WREG32(CB_COLOR9_BASE, 0);
3475 WREG32(CB_COLOR10_BASE, 0);
3476 WREG32(CB_COLOR11_BASE, 0);
3478 /* set the shader const cache sizes to 0 */
3479 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3481 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3484 tmp = RREG32(HDP_MISC_CNTL);
3485 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3486 WREG32(HDP_MISC_CNTL, tmp);
3488 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3489 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3491 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
/*
 * evergreen_mc_init - populate the radeon memory-controller description.
 *
 * Derives vram_width from the memory channel size and channel count,
 * reads the PCI aperture base/size, and reads CONFIG_MEMSIZE for the
 * VRAM size (bytes on fusion IGPs, megabytes on discrete parts), then
 * lets the r700 helper place VRAM/GTT in the GPU address space.
 *
 * NOTE(review): this extract elides lines (the CHANSIZE branches, the
 * NOOFCHAN switch cases, and the trailing return) — consult the full
 * file before editing.
 */
3497 int evergreen_mc_init(struct radeon_device *rdev)
3500 	int chansize, numchan;
3502 	/* Get VRAM informations */
3503 	rdev->mc.vram_is_ddr = true;
	/* Fusion IGPs expose the ARB RAMCFG at a different (FUS_) offset. */
3504 	if ((rdev->family == CHIP_PALM) ||
3505 	    (rdev->family == CHIP_SUMO) ||
3506 	    (rdev->family == CHIP_SUMO2))
3507 		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3509 		tmp = RREG32(MC_ARB_RAMCFG);
	/* chansize assignments for these branches are elided in this extract */
3510 	if (tmp & CHANSIZE_OVERRIDE) {
3512 	} else if (tmp & CHANSIZE_MASK) {
	/* numchan is decoded from MC_SHARED_CHMAP (cases elided here) */
3517 	tmp = RREG32(MC_SHARED_CHMAP);
3518 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3533 	rdev->mc.vram_width = numchan * chansize;
3534 	/* Could aper size report 0 ? */
3535 	rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
3536 	rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
3537 	/* Setup GPU memory space */
3538 	if ((rdev->family == CHIP_PALM) ||
3539 	    (rdev->family == CHIP_SUMO) ||
3540 	    (rdev->family == CHIP_SUMO2)) {
3541 		/* size in bytes on fusion */
3542 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3543 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3545 		/* size in MB on evergreen/cayman/tn */
3546 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3547 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	/* Only the aperture-mapped portion is CPU visible. */
3549 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3550 	r700_vram_gtt_location(rdev, &rdev->mc);
3551 	radeon_update_bandwidth_info(rdev);
/*
 * evergreen_print_gpu_status_regs - dump GPU status registers to the log.
 *
 * Diagnostic helper used around soft reset: prints GRBM/SRBM status,
 * CP stall/busy state and DMA engine status. Read-only; no hardware
 * state is modified. On Cayman and newer the second async DMA engine's
 * status register (at +0x800) is dumped as well.
 */
3556 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3558 	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3559 		RREG32(GRBM_STATUS));
3560 	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3561 		RREG32(GRBM_STATUS_SE0));
3562 	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3563 		RREG32(GRBM_STATUS_SE1));
3564 	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3565 		RREG32(SRBM_STATUS));
3566 	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3567 		RREG32(SRBM_STATUS2));
3568 	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3569 		RREG32(CP_STALLED_STAT1));
3570 	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3571 		RREG32(CP_STALLED_STAT2));
3572 	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3573 		RREG32(CP_BUSY_STAT));
3574 	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3576 	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3577 		RREG32(DMA_STATUS_REG));
3578 	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine lives 0x800 bytes above the first */
3579 		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3580 			RREG32(DMA_STATUS_REG + 0x800));
/*
 * evergreen_is_display_hung - heuristically detect a hung display engine.
 *
 * Samples the HV counter of every enabled CRTC, then re-reads it up to
 * 10 times; a CRTC whose counter advances is cleared from the hung set.
 * A delay between samples and the final "return crtc_hung != 0" style
 * result are elided from this extract.
 */
3584 bool evergreen_is_display_hung(struct radeon_device *rdev)
	/* record initial HV counts for every CRTC with MASTER_EN set */
3590 	for (i = 0; i < rdev->num_crtc; i++) {
3591 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3592 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3593 			crtc_hung |= (1 << i);
	/* poll: any CRTC whose counter moved is alive, drop it from the mask */
3597 	for (j = 0; j < 10; j++) {
3598 		for (i = 0; i < rdev->num_crtc; i++) {
3599 			if (crtc_hung & (1 << i)) {
3600 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3601 				if (tmp != crtc_status[i])
3602 					crtc_hung &= ~(1 << i);
/*
 * evergreen_gpu_check_soft_reset - build a mask of blocks needing reset.
 *
 * Inspects GRBM/SRBM/DMA/VM status registers and accumulates
 * RADEON_RESET_* bits for every engine that looks busy or hung.
 * MC busy is deliberately dropped from the mask at the end since a
 * busy memory controller is usually not actually hung.
 * Returns the accumulated reset mask (0 = nothing to reset).
 */
3613 static u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
	/* GRBM_STATUS: 3D pipeline and CP activity */
3619 	tmp = RREG32(GRBM_STATUS);
3620 	if (tmp & (PA_BUSY | SC_BUSY |
3622 		   TA_BUSY | VGT_BUSY |
3624 		   SPI_BUSY | VGT_BUSY_NO_DMA))
3625 		reset_mask |= RADEON_RESET_GFX;
3627 	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3628 		   CP_BUSY | CP_COHERENCY_BUSY))
3629 		reset_mask |= RADEON_RESET_CP;
3631 	if (tmp & GRBM_EE_BUSY)
3632 		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3634 	/* DMA_STATUS_REG */
3635 	tmp = RREG32(DMA_STATUS_REG);
3636 	if (!(tmp & DMA_IDLE))
3637 		reset_mask |= RADEON_RESET_DMA;
	/* SRBM_STATUS2: second DMA engine (condition line elided here) */
3640 	tmp = RREG32(SRBM_STATUS2);
3642 		reset_mask |= RADEON_RESET_DMA;
	/* SRBM_STATUS: RLC/IH/SEM/GRBM/VMC/MC request + busy bits
	 * (several of the tested-bit lines are elided in this extract) */
3645 	tmp = RREG32(SRBM_STATUS);
3646 	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3647 		reset_mask |= RADEON_RESET_RLC;
3650 		reset_mask |= RADEON_RESET_IH;
3653 		reset_mask |= RADEON_RESET_SEM;
3655 	if (tmp & GRBM_RQ_PENDING)
3656 		reset_mask |= RADEON_RESET_GRBM;
3659 		reset_mask |= RADEON_RESET_VMC;
3661 	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3662 		   MCC_BUSY | MCD_BUSY))
3663 		reset_mask |= RADEON_RESET_MC;
3665 	if (evergreen_is_display_hung(rdev))
3666 		reset_mask |= RADEON_RESET_DISPLAY;
	/* VM_L2_STATUS (tested-bit line elided) */
3669 	tmp = RREG32(VM_L2_STATUS);
3671 		reset_mask |= RADEON_RESET_VMC;
3673 	/* Skip MC reset as it's mostly likely not hung, just busy */
3674 	if (reset_mask & RADEON_RESET_MC) {
3675 		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3676 		reset_mask &= ~RADEON_RESET_MC;
/*
 * evergreen_gpu_soft_reset - soft-reset the GPU blocks named in reset_mask.
 *
 * Sequence: halt the CP and (if requested) the DMA ring, stop the MC and
 * wait for idle, translate RADEON_RESET_* bits into GRBM/SRBM soft-reset
 * bits, pulse each soft-reset register (set, read back, clear, read back),
 * then restore the MC and dump status registers again. Delay/udelay calls
 * between the pulse edges are elided from this extract.
 */
3682 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3684 	struct evergreen_mc_save save;
3685 	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3688 	if (reset_mask == 0)
3691 	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3693 	evergreen_print_gpu_status_regs(rdev);
3695 	/* Disable CP parsing/prefetching */
3696 	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
3698 	if (reset_mask & RADEON_RESET_DMA) {
		/* stop the async DMA ring before resetting the engine */
3700 		tmp = RREG32(DMA_RB_CNTL);
3701 		tmp &= ~DMA_RB_ENABLE;
3702 		WREG32(DMA_RB_CNTL, tmp);
	/* quiesce the memory controller so VRAM stays consistent across reset */
3707 	evergreen_mc_stop(rdev, &save);
3708 	if (evergreen_mc_wait_for_idle(rdev)) {
3709 		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	/* map RADEON_RESET_* bits onto GRBM soft-reset bits
	 * (the full SOFT_RESET_* OR-lists are elided in this extract) */
3712 	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
3713 		grbm_soft_reset |= SOFT_RESET_DB |
3726 	if (reset_mask & RADEON_RESET_CP) {
3727 		grbm_soft_reset |= SOFT_RESET_CP |
3730 		srbm_soft_reset |= SOFT_RESET_GRBM;
	/* map RADEON_RESET_* bits onto SRBM soft-reset bits */
3733 	if (reset_mask & RADEON_RESET_DMA)
3734 		srbm_soft_reset |= SOFT_RESET_DMA;
3736 	if (reset_mask & RADEON_RESET_DISPLAY)
3737 		srbm_soft_reset |= SOFT_RESET_DC;
3739 	if (reset_mask & RADEON_RESET_RLC)
3740 		srbm_soft_reset |= SOFT_RESET_RLC;
3742 	if (reset_mask & RADEON_RESET_SEM)
3743 		srbm_soft_reset |= SOFT_RESET_SEM;
3745 	if (reset_mask & RADEON_RESET_IH)
3746 		srbm_soft_reset |= SOFT_RESET_IH;
3748 	if (reset_mask & RADEON_RESET_GRBM)
3749 		srbm_soft_reset |= SOFT_RESET_GRBM;
3751 	if (reset_mask & RADEON_RESET_VMC)
3752 		srbm_soft_reset |= SOFT_RESET_VMC;
	/* MC reset is never safe on IGPs (VRAM is carved from system RAM) */
3754 	if (!(rdev->flags & RADEON_IS_IGP)) {
3755 		if (reset_mask & RADEON_RESET_MC)
3756 			srbm_soft_reset |= SOFT_RESET_MC;
	/* pulse GRBM soft reset: set bits, read back, clear, read back */
3759 	if (grbm_soft_reset) {
3760 		tmp = RREG32(GRBM_SOFT_RESET);
3761 		tmp |= grbm_soft_reset;
3762 		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
3763 		WREG32(GRBM_SOFT_RESET, tmp);
3764 		tmp = RREG32(GRBM_SOFT_RESET);
3768 		tmp &= ~grbm_soft_reset;
3769 		WREG32(GRBM_SOFT_RESET, tmp);
3770 		tmp = RREG32(GRBM_SOFT_RESET);
	/* pulse SRBM soft reset the same way */
3773 	if (srbm_soft_reset) {
3774 		tmp = RREG32(SRBM_SOFT_RESET);
3775 		tmp |= srbm_soft_reset;
3776 		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
3777 		WREG32(SRBM_SOFT_RESET, tmp);
3778 		tmp = RREG32(SRBM_SOFT_RESET);
3782 		tmp &= ~srbm_soft_reset;
3783 		WREG32(SRBM_SOFT_RESET, tmp);
3784 		tmp = RREG32(SRBM_SOFT_RESET);
3787 	/* Wait a little for things to settle down */
3790 	evergreen_mc_resume(rdev, &save);
3793 	evergreen_print_gpu_status_regs(rdev)
/*
 * evergreen_asic_reset - top-level GPU reset entry point.
 *
 * Computes which blocks need resetting, marks the engine hung in the
 * BIOS scratch register, performs the soft reset, re-checks, and clears
 * the hung flag. The conditional guards and return value lines are
 * elided from this extract.
 */
3796 int evergreen_asic_reset(struct radeon_device *rdev)
3800 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3803 		r600_set_bios_scratch_engine_hung(rdev, true);
3805 	evergreen_gpu_soft_reset(rdev, reset_mask);
	/* re-evaluate after the reset to see whether it took effect */
3807 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3810 		r600_set_bios_scratch_engine_hung(rdev, false);
3816 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3818 * @rdev: radeon_device pointer
3819 * @ring: radeon_ring structure holding ring information
3821 * Check if the GFX engine is locked up.
3822 * Returns true if the engine appears to be locked up, false if not.
/* Check whether the GFX ring is locked up: if no GFX/compute/CP reset is
 * pending, the ring is considered alive and the lockup tracker is updated;
 * otherwise force CP activity and let the ring-test decide. */
3824 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3826 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3828 	if (!(reset_mask & (RADEON_RESET_GFX |
3829 			    RADEON_RESET_COMPUTE |
3830 			    RADEON_RESET_CP))) {
		/* engine looks healthy; refresh the last-activity timestamp
		 * (the "return false" line is elided in this extract) */
3831 		radeon_ring_lockup_update(ring);
3834 	/* force CP activities */
3835 	radeon_ring_force_activity(rdev, ring);
3836 	return radeon_ring_test_lockup(rdev, ring);
3840 * evergreen_dma_is_lockup - Check if the DMA engine is locked up
3842 * @rdev: radeon_device pointer
3843 * @ring: radeon_ring structure holding ring information
3845 * Check if the async DMA engine is locked up.
3846 * Returns true if the engine appears to be locked up, false if not.
/* Check whether the async DMA ring is locked up; mirrors
 * evergreen_gfx_is_lockup() but keys off RADEON_RESET_DMA only. */
3848 bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3850 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3852 	if (!(reset_mask & RADEON_RESET_DMA)) {
		/* DMA engine idle/healthy ("return false" elided here) */
3853 		radeon_ring_lockup_update(ring);
3856 	/* force ring activities */
3857 	radeon_ring_force_activity(rdev, ring);
3858 	return radeon_ring_test_lockup(rdev, ring);
3864 #define RLC_SAVE_RESTORE_LIST_END_MARKER 0x00000000
3865 #define RLC_CLEAR_STATE_END_MARKER 0x00000001
/*
 * sumo_rlc_fini - tear down the RLC save/restore and clear-state buffers.
 *
 * For each buffer object: reserve (warning on failure, then proceeds —
 * matching the radeon driver's best-effort teardown style), unpin,
 * unreserve, drop the reference and NULL the pointer. Safe to call when
 * either object was never allocated.
 */
3867 void sumo_rlc_fini(struct radeon_device *rdev)
3871 	/* save restore block */
3872 	if (rdev->rlc.save_restore_obj) {
3873 		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3874 		if (unlikely(r != 0))
3875 			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
3876 		radeon_bo_unpin(rdev->rlc.save_restore_obj);
3877 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3879 		radeon_bo_unref(&rdev->rlc.save_restore_obj);
3880 		rdev->rlc.save_restore_obj = NULL;
3883 	/* clear state block */
3884 	if (rdev->rlc.clear_state_obj) {
3885 		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
3886 		if (unlikely(r != 0))
3887 			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
3888 		radeon_bo_unpin(rdev->rlc.clear_state_obj);
3889 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
3891 		radeon_bo_unref(&rdev->rlc.clear_state_obj);
3892 		rdev->rlc.clear_state_obj = NULL;
/*
 * sumo_rlc_init - allocate and fill the RLC save/restore and clear-state
 * buffers in VRAM.
 *
 * The save/restore buffer packs register offsets from rdev->rlc.reg_list
 * two-per-dword with save slots between them; the clear-state buffer is
 * built from rdev->rlc.cs_data as a header block (address/offset/length
 * triplets per section) followed by the raw register data, terminated by
 * an end marker. On any failure sumo_rlc_fini() is called to unwind
 * (the "return r" lines after each unwind are elided in this extract).
 */
3896 int sumo_rlc_init(struct radeon_device *rdev)
3899 	volatile u32 *dst_ptr;
3900 	u32 dws, data, i, j, k, reg_num;
3901 	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index;
3902 	u64 reg_list_mc_addr;
3903 	struct cs_section_def *cs_data;
3908 	src_ptr = rdev->rlc.reg_list;
3909 	dws = rdev->rlc.reg_list_size;
3910 	cs_data = rdev->rlc.cs_data;
3912 	/* save restore block */
3913 	if (rdev->rlc.save_restore_obj == NULL) {
3914 		r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
3915 				RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.save_restore_obj);
3917 			dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
3922 	r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3923 	if (unlikely(r != 0)) {
3924 		sumo_rlc_fini(rdev);
3927 	r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
3928 			&rdev->rlc.save_restore_gpu_addr);
3930 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3931 		dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
3932 		sumo_rlc_fini(rdev);
3935 	r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void**)&vptr);
3937 		dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
3938 		sumo_rlc_fini(rdev);
3941 	rdev->rlc.sr_ptr = vptr;
3942 	/* write the sr buffer */
3943 	dst_ptr = rdev->rlc.sr_ptr;
	/* layout per pair of registers:
3945 	 * dw0: (reg2 << 16) | reg1
3946 	 * dw1: reg1 save space
3947 	 * dw2: reg2 save space
	 * (the loop's store lines and index math are partly elided here) */
3949 	for (i = 0; i < dws; i++) {
3950 		data = src_ptr[i] >> 2;
3953 			data |= (src_ptr[i] >> 2) << 16;
3954 			j = (((i - 1) * 3) / 2);
3958 	dst_ptr[j] = RLC_SAVE_RESTORE_LIST_END_MARKER;
3960 	radeon_bo_kunmap(rdev->rlc.save_restore_obj);
3961 	radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3963 	/* clear state block */
	/* count sections/registers to size the buffer
	 * (reg_list_num increment line is elided in this extract) */
3966 	for (i = 0; cs_data[i].section != NULL; i++) {
3967 		for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
3969 			dws += cs_data[i].section[j].reg_count;
	/* header block: 3 dwords per register list + 2 (hi addr + end marker) */
3972 	reg_list_blk_index = (3 * reg_list_num + 2);
3973 	dws += reg_list_blk_index;
3975 	if (rdev->rlc.clear_state_obj == NULL) {
3976 		r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
3977 				RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.clear_state_obj);
3979 			dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
3980 			sumo_rlc_fini(rdev);
3984 	r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
3985 	if (unlikely(r != 0)) {
3986 		sumo_rlc_fini(rdev);
3989 	r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
3990 			&rdev->rlc.clear_state_gpu_addr);
3993 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
3994 		dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
3995 		sumo_rlc_fini(rdev);
3998 	r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&vptr);
4000 		dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4001 		sumo_rlc_fini(rdev);
4004 	rdev->rlc.cs_ptr = vptr;
4005 	/* set up the cs buffer */
4006 	dst_ptr = rdev->rlc.cs_ptr;
4007 	reg_list_hdr_blk_index = 0;
	/* data area starts right after the header block */
4008 	reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4009 	data = upper_32_bits(reg_list_mc_addr);
4010 	dst_ptr[reg_list_hdr_blk_index] = data;
4011 	reg_list_hdr_blk_index++;
4012 	for (i = 0; cs_data[i].section != NULL; i++) {
4013 		for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4014 			reg_num = cs_data[i].section[j].reg_count;
			/* header entry: MC address (lo 32 bits) of this list's data */
4015 			data = reg_list_mc_addr & 0xffffffff;
4016 			dst_ptr[reg_list_hdr_blk_index] = data;
4017 			reg_list_hdr_blk_index++;
			/* header entry: starting register offset in bytes */
4019 			data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4020 			dst_ptr[reg_list_hdr_blk_index] = data;
4021 			reg_list_hdr_blk_index++;
			/* header entry: length in bytes with flag bit 27 set */
4023 			data = 0x08000000 | (reg_num * 4);
4024 			dst_ptr[reg_list_hdr_blk_index] = data;
4025 			reg_list_hdr_blk_index++;
			/* copy the register values themselves */
4027 			for (k = 0; k < reg_num; k++) {
4028 				data = cs_data[i].section[j].extent[k];
4029 				dst_ptr[reg_list_blk_index + k] = data;
4031 			reg_list_mc_addr += reg_num * 4;
4032 			reg_list_blk_index += reg_num;
4035 	dst_ptr[reg_list_hdr_blk_index] = RLC_CLEAR_STATE_END_MARKER;
4037 	radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4038 	radeon_bo_unreserve(rdev->rlc.clear_state_obj);
/* Enable the RLC; on IGPs also turn on graphics power gating bits
 * in the same RLC_CNTL write. */
4043 static void evergreen_rlc_start(struct radeon_device *rdev)
4045 	u32 mask = RLC_ENABLE;
4047 	if (rdev->flags & RADEON_IS_IGP) {
4048 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4051 	WREG32(RLC_CNTL, mask);
/*
 * evergreen_rlc_resume - program and start the RLC microcontroller.
 *
 * Stops the RLC, configures per-family registers (Aruba additionally
 * gets load-balancer setup when all SIMDs are active; other IGPs point
 * the RLC at the sumo_rlc save/restore and clear-state buffers), loads
 * the RLC microcode appropriate to the family, then starts the RLC.
 * The firmware-present check and return lines are elided here.
 */
4054 int evergreen_rlc_resume(struct radeon_device *rdev)
4057 	const __be32 *fw_data;
4062 	r600_rlc_stop(rdev);
4064 	WREG32(RLC_HB_CNTL, 0);
4066 	if (rdev->flags & RADEON_IS_IGP) {
4067 		if (rdev->family == CHIP_ARUBA) {
			/* bits for SE0/SE1 always-on SIMDs */
4068 			u32 always_on_bitmap =
4069 				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4070 			/* find out the number of active simds */
4071 			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4072 			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4073 			tmp = hweight32(~tmp);
			/* only enable RLC load balancing when no SIMDs are fused off */
4074 			if (tmp == rdev->config.cayman.max_simds_per_se) {
4075 				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4076 				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4077 				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4078 				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4079 				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4082 			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4083 			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
		/* point the RLC at the buffers built by sumo_rlc_init() */
4085 		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4086 		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4088 		WREG32(RLC_HB_BASE, 0);
4089 		WREG32(RLC_HB_RPTR, 0);
4090 		WREG32(RLC_HB_WPTR, 0);
4091 		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4092 		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4094 	WREG32(RLC_MC_CNTL, 0);
4095 	WREG32(RLC_UCODE_CNTL, 0);
	/* upload the microcode word-by-word; size depends on the family */
4097 	fw_data = (const __be32 *)rdev->rlc_fw->data;
4098 	if (rdev->family >= CHIP_ARUBA) {
4099 		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4100 			WREG32(RLC_UCODE_ADDR, i);
4101 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4103 	} else if (rdev->family >= CHIP_CAYMAN) {
4104 		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4105 			WREG32(RLC_UCODE_ADDR, i);
4106 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4109 		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4110 			WREG32(RLC_UCODE_ADDR, i);
4111 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4114 	WREG32(RLC_UCODE_ADDR, 0);
4116 	evergreen_rlc_start(rdev);
/* Return the hardware frame counter for the given CRTC (used as the
 * vblank counter); the out-of-range early return body is elided here. */
4123 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4125 	if (crtc >= rdev->num_crtc)
4128 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
/*
 * evergreen_disable_interrupt_state - force all interrupt sources off.
 *
 * Masks CP ring interrupts (per-ring on Cayman+), DMA traps, GRBM,
 * per-CRTC vblank/pageflip masks, DAC autodetect, and HPD interrupts
 * (preserving only each HPD line's polarity bit). Used to reach a known
 * quiet state before reprogramming interrupts or on teardown.
 */
4131 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4135 	if (rdev->family >= CHIP_CAYMAN) {
		/* ring 0 keeps only the context busy/empty enables; rings 1/2 fully off */
4136 		cayman_cp_int_cntl_setup(rdev, 0,
4137 					CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4138 		cayman_cp_int_cntl_setup(rdev, 1, 0);
4139 		cayman_cp_int_cntl_setup(rdev, 2, 0);
4140 		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4141 		WREG32(CAYMAN_DMA1_CNTL, tmp);
4143 		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4144 	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4145 	WREG32(DMA_CNTL, tmp);
4146 	WREG32(GRBM_INT_CNTL, 0);
	/* mask vblank/vline interrupts on every populated CRTC */
4147 	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4148 	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4149 	if (rdev->num_crtc >= 4) {
4150 		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4151 		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4153 	if (rdev->num_crtc >= 6) {
4154 		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4155 		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	/* mask pageflip (GRPH) interrupts on every populated CRTC */
4158 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4159 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4160 	if (rdev->num_crtc >= 4) {
4161 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4162 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4164 	if (rdev->num_crtc >= 6) {
4165 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4166 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4169 	/* only one DAC on DCE6 */
4170 	if (!ASIC_IS_DCE6(rdev))
4171 		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4172 	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
	/* HPD: clear interrupt enables but keep each line's polarity setting */
4174 	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4175 	WREG32(DC_HPD1_INT_CONTROL, tmp);
4176 	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4177 	WREG32(DC_HPD2_INT_CONTROL, tmp);
4178 	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4179 	WREG32(DC_HPD3_INT_CONTROL, tmp);
4180 	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4181 	WREG32(DC_HPD4_INT_CONTROL, tmp);
4182 	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4183 	WREG32(DC_HPD5_INT_CONTROL, tmp);
4184 	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4185 	WREG32(DC_HPD6_INT_CONTROL, tmp);
4189 int evergreen_irq_set(struct radeon_device *rdev)
4191 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4192 u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4193 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4194 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4195 u32 grbm_int_cntl = 0;
4196 u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
4197 u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4198 u32 dma_cntl, dma_cntl1 = 0;
4199 u32 thermal_int = 0;
4201 if (!rdev->irq.installed) {
4202 dev_warn(rdev->dev, "Can't enable IRQ/MSI because no handler is installed\n");
4205 /* don't enable anything if the ih is disabled */
4206 if (!rdev->ih.enabled) {
4207 r600_disable_interrupts(rdev);
4208 /* force the active interrupt state to all disabled */
4209 evergreen_disable_interrupt_state(rdev);
4213 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
4214 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
4215 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
4216 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
4217 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
4218 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
4219 if (rdev->family == CHIP_ARUBA)
4220 thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4221 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4223 thermal_int = RREG32(CG_THERMAL_INT) &
4224 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4226 afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4227 afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4228 afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4229 afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4230 afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4231 afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4233 dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4235 if (rdev->family >= CHIP_CAYMAN) {
4236 /* enable CP interrupts on all rings */
4237 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4238 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4239 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4241 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4242 DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4243 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4245 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4246 DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4247 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4250 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4251 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4252 cp_int_cntl |= RB_INT_ENABLE;
4253 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4257 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4258 DRM_DEBUG("r600_irq_set: sw int dma\n");
4259 dma_cntl |= TRAP_ENABLE;
4262 if (rdev->family >= CHIP_CAYMAN) {
4263 dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4264 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4265 DRM_DEBUG("r600_irq_set: sw int dma1\n");
4266 dma_cntl1 |= TRAP_ENABLE;
4270 if (rdev->irq.dpm_thermal) {
4271 DRM_DEBUG("dpm thermal\n");
4272 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4275 if (rdev->irq.crtc_vblank_int[0] ||
4276 atomic_read(&rdev->irq.pflip[0])) {
4277 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4278 crtc1 |= VBLANK_INT_MASK;
4280 if (rdev->irq.crtc_vblank_int[1] ||
4281 atomic_read(&rdev->irq.pflip[1])) {
4282 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4283 crtc2 |= VBLANK_INT_MASK;
4285 if (rdev->irq.crtc_vblank_int[2] ||
4286 atomic_read(&rdev->irq.pflip[2])) {
4287 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4288 crtc3 |= VBLANK_INT_MASK;
4290 if (rdev->irq.crtc_vblank_int[3] ||
4291 atomic_read(&rdev->irq.pflip[3])) {
4292 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4293 crtc4 |= VBLANK_INT_MASK;
4295 if (rdev->irq.crtc_vblank_int[4] ||
4296 atomic_read(&rdev->irq.pflip[4])) {
4297 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4298 crtc5 |= VBLANK_INT_MASK;
4300 if (rdev->irq.crtc_vblank_int[5] ||
4301 atomic_read(&rdev->irq.pflip[5])) {
4302 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4303 crtc6 |= VBLANK_INT_MASK;
4305 if (rdev->irq.hpd[0]) {
4306 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4307 hpd1 |= DC_HPDx_INT_EN;
4309 if (rdev->irq.hpd[1]) {
4310 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4311 hpd2 |= DC_HPDx_INT_EN;
4313 if (rdev->irq.hpd[2]) {
4314 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4315 hpd3 |= DC_HPDx_INT_EN;
4317 if (rdev->irq.hpd[3]) {
4318 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4319 hpd4 |= DC_HPDx_INT_EN;
4321 if (rdev->irq.hpd[4]) {
4322 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4323 hpd5 |= DC_HPDx_INT_EN;
4325 if (rdev->irq.hpd[5]) {
4326 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4327 hpd6 |= DC_HPDx_INT_EN;
4329 if (rdev->irq.afmt[0]) {
4330 DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4331 afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4333 if (rdev->irq.afmt[1]) {
4334 DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4335 afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4337 if (rdev->irq.afmt[2]) {
4338 DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4339 afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4341 if (rdev->irq.afmt[3]) {
4342 DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4343 afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4345 if (rdev->irq.afmt[4]) {
4346 DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4347 afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4349 if (rdev->irq.afmt[5]) {
4350 DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4351 afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4354 if (rdev->family >= CHIP_CAYMAN) {
4355 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4356 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4357 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4359 WREG32(CP_INT_CNTL, cp_int_cntl);
4361 WREG32(DMA_CNTL, dma_cntl);
4363 if (rdev->family >= CHIP_CAYMAN)
4364 WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4366 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4368 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4369 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4370 if (rdev->num_crtc >= 4) {
4371 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4372 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4374 if (rdev->num_crtc >= 6) {
4375 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4376 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4379 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
4380 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
4381 if (rdev->num_crtc >= 4) {
4382 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
4383 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
4385 if (rdev->num_crtc >= 6) {
4386 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
4387 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
4390 WREG32(DC_HPD1_INT_CONTROL, hpd1);
4391 WREG32(DC_HPD2_INT_CONTROL, hpd2);
4392 WREG32(DC_HPD3_INT_CONTROL, hpd3);
4393 WREG32(DC_HPD4_INT_CONTROL, hpd4);
4394 WREG32(DC_HPD5_INT_CONTROL, hpd5);
4395 WREG32(DC_HPD6_INT_CONTROL, hpd6);
4396 if (rdev->family == CHIP_ARUBA)
4397 WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4399 WREG32(CG_THERMAL_INT, thermal_int);
4401 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4402 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4403 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4404 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4405 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4406 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4411 static void evergreen_irq_ack(struct radeon_device *rdev)
4415 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4416 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4417 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4418 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4419 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4420 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4421 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4422 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4423 if (rdev->num_crtc >= 4) {
4424 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4425 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4427 if (rdev->num_crtc >= 6) {
4428 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4429 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4432 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4433 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4434 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4435 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4436 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4437 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4439 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4440 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4441 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4442 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4443 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4444 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4445 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4446 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4447 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4448 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4449 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4450 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4452 if (rdev->num_crtc >= 4) {
4453 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4454 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4455 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4456 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4457 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4458 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4459 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4460 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4461 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4462 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4463 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4464 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4467 if (rdev->num_crtc >= 6) {
4468 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4469 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4470 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4471 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4472 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4473 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4474 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4475 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4476 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4477 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4478 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4479 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4482 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4483 tmp = RREG32(DC_HPD1_INT_CONTROL);
4484 tmp |= DC_HPDx_INT_ACK;
4485 WREG32(DC_HPD1_INT_CONTROL, tmp);
4487 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4488 tmp = RREG32(DC_HPD2_INT_CONTROL);
4489 tmp |= DC_HPDx_INT_ACK;
4490 WREG32(DC_HPD2_INT_CONTROL, tmp);
4492 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4493 tmp = RREG32(DC_HPD3_INT_CONTROL);
4494 tmp |= DC_HPDx_INT_ACK;
4495 WREG32(DC_HPD3_INT_CONTROL, tmp);
4497 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4498 tmp = RREG32(DC_HPD4_INT_CONTROL);
4499 tmp |= DC_HPDx_INT_ACK;
4500 WREG32(DC_HPD4_INT_CONTROL, tmp);
4502 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4503 tmp = RREG32(DC_HPD5_INT_CONTROL);
4504 tmp |= DC_HPDx_INT_ACK;
4505 WREG32(DC_HPD5_INT_CONTROL, tmp);
4507 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4508 tmp = RREG32(DC_HPD5_INT_CONTROL);
4509 tmp |= DC_HPDx_INT_ACK;
4510 WREG32(DC_HPD6_INT_CONTROL, tmp);
4512 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4513 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4514 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4515 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4517 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4518 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4519 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4520 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4522 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4523 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4524 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4525 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4527 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4528 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4529 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4530 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4532 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4533 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4534 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4535 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4537 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4538 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4539 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4540 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
/*
 * evergreen_irq_disable - hard-disable interrupt generation
 *
 * Masks all interrupt sources, gives in-flight interrupts a moment to
 * settle, acknowledges anything still latched, and forces the interrupt
 * enable state of every source to disabled.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	/* The delay was missing here although the comment promises one:
	 * without it, an interrupt still in flight when the sources are
	 * masked can latch status after the ack below.
	 */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
/*
 * evergreen_irq_suspend - quiesce interrupts for suspend
 *
 * Disables all interrupt sources and then stops the RLC, which drives
 * interrupt delivery to the host.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4559 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4563 if (rdev->wb.enabled)
4564 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4566 wptr = RREG32(IH_RB_WPTR);
4568 if (wptr & RB_OVERFLOW) {
4569 /* When a ring buffer overflow happen start parsing interrupt
4570 * from the last not overwritten vector (wptr + 16). Hopefully
4571 * this should allow us to catchup.
4573 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4574 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4575 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4576 tmp = RREG32(IH_RB_CNTL);
4577 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4578 WREG32(IH_RB_CNTL, tmp);
4580 return (wptr & rdev->ih.ptr_mask);
4583 irqreturn_t evergreen_irq_process(struct radeon_device *rdev)
4587 u32 src_id, src_data;
4589 bool queue_hotplug = false;
4590 bool queue_hdmi = false;
4591 bool queue_thermal = false;
4594 if (!rdev->ih.enabled || rdev->shutdown)
4597 wptr = evergreen_get_ih_wptr(rdev);
4600 /* is somebody else already processing irqs? */
4601 if (atomic_xchg(&rdev->ih.lock, 1))
4604 rptr = rdev->ih.rptr;
4605 DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4607 /* Order reading of wptr vs. reading of IH ring data */
4610 /* display interrupts */
4611 evergreen_irq_ack(rdev);
4613 while (rptr != wptr) {
4614 /* wptr/rptr are in bytes! */
4615 ring_index = rptr / 4;
4616 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4617 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4620 case 1: /* D1 vblank/vline */
4622 case 0: /* D1 vblank */
4623 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4624 if (rdev->irq.crtc_vblank_int[0]) {
4625 drm_handle_vblank(rdev->ddev, 0);
4626 rdev->pm.vblank_sync = true;
4627 DRM_WAKEUP(&rdev->irq.vblank_queue);
4629 if (atomic_read(&rdev->irq.pflip[0]))
4630 radeon_crtc_handle_flip(rdev, 0);
4631 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4632 DRM_DEBUG("IH: D1 vblank\n");
4635 case 1: /* D1 vline */
4636 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4637 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4638 DRM_DEBUG("IH: D1 vline\n");
4642 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4646 case 2: /* D2 vblank/vline */
4648 case 0: /* D2 vblank */
4649 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4650 if (rdev->irq.crtc_vblank_int[1]) {
4651 drm_handle_vblank(rdev->ddev, 1);
4652 rdev->pm.vblank_sync = true;
4653 DRM_WAKEUP(&rdev->irq.vblank_queue);
4655 if (atomic_read(&rdev->irq.pflip[1]))
4656 radeon_crtc_handle_flip(rdev, 1);
4657 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4658 DRM_DEBUG("IH: D2 vblank\n");
4661 case 1: /* D2 vline */
4662 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4663 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4664 DRM_DEBUG("IH: D2 vline\n");
4668 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4672 case 3: /* D3 vblank/vline */
4674 case 0: /* D3 vblank */
4675 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4676 if (rdev->irq.crtc_vblank_int[2]) {
4677 drm_handle_vblank(rdev->ddev, 2);
4678 rdev->pm.vblank_sync = true;
4679 DRM_WAKEUP(&rdev->irq.vblank_queue);
4681 if (atomic_read(&rdev->irq.pflip[2]))
4682 radeon_crtc_handle_flip(rdev, 2);
4683 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4684 DRM_DEBUG("IH: D3 vblank\n");
4687 case 1: /* D3 vline */
4688 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4689 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4690 DRM_DEBUG("IH: D3 vline\n");
4694 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4698 case 4: /* D4 vblank/vline */
4700 case 0: /* D4 vblank */
4701 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4702 if (rdev->irq.crtc_vblank_int[3]) {
4703 drm_handle_vblank(rdev->ddev, 3);
4704 rdev->pm.vblank_sync = true;
4705 DRM_WAKEUP(&rdev->irq.vblank_queue);
4707 if (atomic_read(&rdev->irq.pflip[3]))
4708 radeon_crtc_handle_flip(rdev, 3);
4709 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4710 DRM_DEBUG("IH: D4 vblank\n");
4713 case 1: /* D4 vline */
4714 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4715 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4716 DRM_DEBUG("IH: D4 vline\n");
4720 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4724 case 5: /* D5 vblank/vline */
4726 case 0: /* D5 vblank */
4727 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4728 if (rdev->irq.crtc_vblank_int[4]) {
4729 drm_handle_vblank(rdev->ddev, 4);
4730 rdev->pm.vblank_sync = true;
4731 DRM_WAKEUP(&rdev->irq.vblank_queue);
4733 if (atomic_read(&rdev->irq.pflip[4]))
4734 radeon_crtc_handle_flip(rdev, 4);
4735 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4736 DRM_DEBUG("IH: D5 vblank\n");
4739 case 1: /* D5 vline */
4740 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4741 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4742 DRM_DEBUG("IH: D5 vline\n");
4746 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4750 case 6: /* D6 vblank/vline */
4752 case 0: /* D6 vblank */
4753 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4754 if (rdev->irq.crtc_vblank_int[5]) {
4755 drm_handle_vblank(rdev->ddev, 5);
4756 rdev->pm.vblank_sync = true;
4757 DRM_WAKEUP(&rdev->irq.vblank_queue);
4759 if (atomic_read(&rdev->irq.pflip[5]))
4760 radeon_crtc_handle_flip(rdev, 5);
4761 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4762 DRM_DEBUG("IH: D6 vblank\n");
4765 case 1: /* D6 vline */
4766 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4767 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4768 DRM_DEBUG("IH: D6 vline\n");
4772 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4776 case 42: /* HPD hotplug */
4779 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4780 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4781 queue_hotplug = true;
4782 DRM_DEBUG("IH: HPD1\n");
4786 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4787 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4788 queue_hotplug = true;
4789 DRM_DEBUG("IH: HPD2\n");
4793 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4794 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4795 queue_hotplug = true;
4796 DRM_DEBUG("IH: HPD3\n");
4800 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4801 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4802 queue_hotplug = true;
4803 DRM_DEBUG("IH: HPD4\n");
4807 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4808 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4809 queue_hotplug = true;
4810 DRM_DEBUG("IH: HPD5\n");
4814 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4815 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
4816 queue_hotplug = true;
4817 DRM_DEBUG("IH: HPD6\n");
4821 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4828 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4829 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4831 DRM_DEBUG("IH: HDMI0\n");
4835 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4836 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4838 DRM_DEBUG("IH: HDMI1\n");
4842 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4843 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4845 DRM_DEBUG("IH: HDMI2\n");
4849 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4850 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4852 DRM_DEBUG("IH: HDMI3\n");
4856 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4857 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4859 DRM_DEBUG("IH: HDMI4\n");
4863 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4864 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4866 DRM_DEBUG("IH: HDMI5\n");
4870 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
4874 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4875 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
4879 addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
4880 status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
4881 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4882 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
4884 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4886 cayman_vm_decode_fault(rdev, status, addr);
4887 /* reset addr and status */
4888 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4890 case 176: /* CP_INT in ring buffer */
4891 case 177: /* CP_INT in IB1 */
4892 case 178: /* CP_INT in IB2 */
4893 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
4894 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4896 case 181: /* CP EOP event */
4897 DRM_DEBUG("IH: CP EOP\n");
4898 if (rdev->family >= CHIP_CAYMAN) {
4901 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4904 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4907 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4911 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4913 case 224: /* DMA trap event */
4914 DRM_DEBUG("IH: DMA trap\n");
4915 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4917 case 230: /* thermal low to high */
4918 DRM_DEBUG("IH: thermal low to high\n");
4919 rdev->pm.dpm.thermal.high_to_low = false;
4920 queue_thermal = true;
4922 case 231: /* thermal high to low */
4923 DRM_DEBUG("IH: thermal high to low\n");
4924 rdev->pm.dpm.thermal.high_to_low = true;
4925 queue_thermal = true;
4927 case 233: /* GUI IDLE */
4928 DRM_DEBUG("IH: GUI idle\n");
4930 case 244: /* DMA trap event */
4931 if (rdev->family >= CHIP_CAYMAN) {
4932 DRM_DEBUG("IH: DMA1 trap\n");
4933 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
4937 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4941 /* wptr/rptr are in bytes! */
4943 rptr &= rdev->ih.ptr_mask;
4946 taskqueue_enqueue(rdev->tq, &rdev->hotplug_work);
4948 taskqueue_enqueue(rdev->tq, &rdev->audio_work);
4949 if (queue_thermal && rdev->pm.dpm_enabled)
4950 taskqueue_enqueue(rdev->tq, &rdev->pm.dpm.thermal.work);
4952 rdev->ih.rptr = rptr;
4953 WREG32(IH_RB_RPTR, rdev->ih.rptr);
4954 atomic_set(&rdev->ih.lock, 0);
4956 /* make sure wptr hasn't changed while processing */
4957 wptr = evergreen_get_ih_wptr(rdev);
4965 * evergreen_dma_fence_ring_emit - emit a fence on the DMA ring
4967 * @rdev: radeon_device pointer
4968 * @fence: radeon fence object
4970 * Add a DMA fence packet to the ring to write
4971 * the fence seq number and DMA trap packet to generate
4972 * an interrupt if needed (evergreen-SI).
4974 void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
4975 struct radeon_fence *fence)
4977 struct radeon_ring *ring = &rdev->ring[fence->ring];
4978 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
4979 /* write the fence */
4980 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
4981 radeon_ring_write(ring, addr & 0xfffffffc);
4982 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
4983 radeon_ring_write(ring, fence->seq);
4984 /* generate an interrupt */
4985 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0));
4987 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0));
4988 radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
4989 radeon_ring_write(ring, 1);
4993 * evergreen_dma_ring_ib_execute - schedule an IB on the DMA engine
4995 * @rdev: radeon_device pointer
4996 * @ib: IB object to schedule
4998 * Schedule an IB in the DMA ring (evergreen).
5000 void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
5001 struct radeon_ib *ib)
5003 struct radeon_ring *ring = &rdev->ring[ib->ring];
5005 if (rdev->wb.enabled) {
5006 u32 next_rptr = ring->wptr + 4;
5007 while ((next_rptr & 7) != 5)
5010 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 1));
5011 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
5012 radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
5013 radeon_ring_write(ring, next_rptr);
5016 /* The indirect buffer packet must end on an 8 DW boundary in the DMA ring.
5017 * Pad as necessary with NOPs.
5019 while ((ring->wptr & 7) != 5)
5020 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5021 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0));
5022 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
5023 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
5028 * evergreen_copy_dma - copy pages using the DMA engine
5030 * @rdev: radeon_device pointer
5031 * @src_offset: src GPU address
5032 * @dst_offset: dst GPU address
5033 * @num_gpu_pages: number of GPU pages to xfer
5034 * @fence: radeon fence object
5036 * Copy GPU paging using the DMA engine (evergreen-cayman).
5037 * Used by the radeon ttm implementation to move pages if
5038 * registered as the asic copy callback.
5040 int evergreen_copy_dma(struct radeon_device *rdev,
5041 uint64_t src_offset, uint64_t dst_offset,
5042 unsigned num_gpu_pages,
5043 struct radeon_fence **fence)
5045 struct radeon_semaphore *sem = NULL;
5046 int ring_index = rdev->asic->copy.dma_ring_index;
5047 struct radeon_ring *ring = &rdev->ring[ring_index];
5048 u32 size_in_dw, cur_size_in_dw;
5052 r = radeon_semaphore_create(rdev, &sem);
5054 DRM_ERROR("radeon: moving bo (%d).\n", r);
5058 size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
5059 num_loops = DIV_ROUND_UP(size_in_dw, 0xfffff);
5060 r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
5062 DRM_ERROR("radeon: moving bo (%d).\n", r);
5063 radeon_semaphore_free(rdev, &sem, NULL);
5067 if (radeon_fence_need_sync(*fence, ring->idx)) {
5068 radeon_semaphore_sync_rings(rdev, sem, (*fence)->ring,
5070 radeon_fence_note_sync(*fence, ring->idx);
5072 radeon_semaphore_free(rdev, &sem, NULL);
5075 for (i = 0; i < num_loops; i++) {
5076 cur_size_in_dw = size_in_dw;
5077 if (cur_size_in_dw > 0xFFFFF)
5078 cur_size_in_dw = 0xFFFFF;
5079 size_in_dw -= cur_size_in_dw;
5080 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, cur_size_in_dw));
5081 radeon_ring_write(ring, dst_offset & 0xfffffffc);
5082 radeon_ring_write(ring, src_offset & 0xfffffffc);
5083 radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
5084 radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
5085 src_offset += cur_size_in_dw * 4;
5086 dst_offset += cur_size_in_dw * 4;
5089 r = radeon_fence_emit(rdev, fence, ring->idx);
5091 radeon_ring_unlock_undo(rdev, ring);
5095 radeon_ring_unlock_commit(rdev, ring);
5096 radeon_semaphore_free(rdev, &sem, *fence);
5101 static int evergreen_startup(struct radeon_device *rdev)
5103 struct radeon_ring *ring;
5106 /* enable pcie gen2 link */
5107 evergreen_pcie_gen2_enable(rdev);
5109 evergreen_program_aspm(rdev);
5111 evergreen_mc_program(rdev);
5113 if (ASIC_IS_DCE5(rdev)) {
5114 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
5115 r = ni_init_microcode(rdev);
5117 DRM_ERROR("Failed to load firmware!\n");
5121 r = ni_mc_load_microcode(rdev);
5123 DRM_ERROR("Failed to load MC firmware!\n");
5127 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
5128 r = r600_init_microcode(rdev);
5130 DRM_ERROR("Failed to load firmware!\n");
5136 r = r600_vram_scratch_init(rdev);
5140 if (rdev->flags & RADEON_IS_AGP) {
5141 evergreen_agp_enable(rdev);
5143 r = evergreen_pcie_gart_enable(rdev);
5147 evergreen_gpu_init(rdev);
5149 r = evergreen_blit_init(rdev);
5151 r600_blit_fini(rdev);
5152 rdev->asic->copy.copy = NULL;
5153 dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
5156 /* allocate rlc buffers */
5157 if (rdev->flags & RADEON_IS_IGP) {
5158 rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5159 rdev->rlc.reg_list_size = sumo_rlc_save_restore_register_list_size;
5160 rdev->rlc.cs_data = evergreen_cs_data;
5161 r = sumo_rlc_init(rdev);
5163 DRM_ERROR("Failed to init rlc BOs!\n");
5168 /* allocate wb buffer */
5169 r = radeon_wb_init(rdev);
5173 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5175 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5179 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5181 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
5185 r = rv770_uvd_resume(rdev);
5187 r = radeon_fence_driver_start_ring(rdev,
5188 R600_RING_TYPE_UVD_INDEX);
5190 dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
5194 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5197 if (!rdev->irq.installed) {
5198 r = radeon_irq_kms_init(rdev);
5203 r = r600_irq_init(rdev);
5205 DRM_ERROR("radeon: IH init failed (%d).\n", r);
5206 radeon_irq_kms_fini(rdev);
5209 evergreen_irq_set(rdev);
5211 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5212 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5213 R600_CP_RB_RPTR, R600_CP_RB_WPTR,
5214 0, 0xfffff, RADEON_CP_PACKET2);
5218 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5219 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5220 DMA_RB_RPTR, DMA_RB_WPTR,
5221 2, 0x3fffc, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5225 r = evergreen_cp_load_microcode(rdev);
5228 r = evergreen_cp_resume(rdev);
5231 r = r600_dma_resume(rdev);
5235 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5236 if (ring->ring_size) {
5237 r = radeon_ring_init(rdev, ring, ring->ring_size,
5238 R600_WB_UVD_RPTR_OFFSET,
5239 UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
5240 0, 0xfffff, RADEON_CP_PACKET2);
5242 r = r600_uvd_init(rdev);
5245 DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
5248 r = radeon_ib_pool_init(rdev);
5250 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5254 r = r600_audio_init(rdev);
5256 DRM_ERROR("radeon: audio init failed\n");
/*
 * evergreen_resume - bring the GPU back up after suspend/driver reload.
 *
 * Resets the ASIC (the GFX blocks are often left in a bad state), re-runs
 * the ATOM BIOS init tables to re-post the card, reloads the golden register
 * settings, and restarts acceleration via evergreen_startup().  On startup
 * failure, accel_working is cleared so the rest of the driver falls back to
 * unaccelerated operation.
 *
 * NOTE(review): this extract is missing interleaved lines from the original
 * file (braces, the declaration of 'r', return statements), so only the
 * visible statements are documented here.
 */
5263 int evergreen_resume(struct radeon_device *rdev)
5267 /* reset the asic, the gfx blocks are often in a bad state
5268 * after the driver is unloaded or after a resume
5270 if (radeon_asic_reset(rdev))
5271 dev_warn(rdev->dev, "GPU reset failed !\n");
5272 /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5273 * posting will perform necessary task to bring back GPU into good
5277 atom_asic_init(rdev->mode_info.atom_context);
/* golden registers must be reprogrammed after every asic init */
5279 /* init golden registers */
5280 evergreen_init_golden_registers(rdev);
/* optimistically mark acceleration working; cleared below on failure */
5282 rdev->accel_working = true;
5283 r = evergreen_startup(rdev);
5285 DRM_ERROR("evergreen startup failed on resume\n");
5286 rdev->accel_working = false;
/*
 * evergreen_suspend - quiesce the GPU before suspend/driver unload.
 *
 * Tears the engines down in dependency order: audio first, then the UVD
 * block, the DMA engine, interrupts, the writeback buffer, and finally the
 * PCIe GART.  NOTE(review): the CP stop call and the return statement from
 * the original file are not visible in this extract.
 */
5294 int evergreen_suspend(struct radeon_device *rdev)
5296 r600_audio_fini(rdev);
/* stop UVD before saving its state for suspend */
5297 r600_uvd_stop(rdev);
5298 radeon_uvd_suspend(rdev);
5300 r600_dma_stop(rdev);
/* disable interrupts before tearing down writeback, which the IH uses */
5301 evergreen_irq_suspend(rdev);
5302 radeon_wb_disable(rdev);
5303 evergreen_pcie_gart_disable(rdev);
5308 /* Plan is to move initialization into that function and to use
5309  * helper functions so that radeon_device_init does pretty much
5310  * nothing more than call asic-specific functions. This
5311  * should also allow us to remove a bunch of callback functions
/*
 * evergreen_init - one-time driver initialization for evergreen-class GPUs.
 *
 * Validates that an ATOM BIOS is present (evergreen parts require it),
 * resets and posts the card, then initializes — in order — golden registers,
 * scratch/surface registers, clocks, the fence driver, AGP (if flagged),
 * the memory controller, the BO memory manager, the GFX/DMA/UVD rings, the
 * IH ring, and the PCIe GART, before attempting full startup.  On startup
 * failure everything is torn down again and accel_working is cleared.
 * Finally, DCE5 (BTC/NI) discrete parts are refused outright when the MC
 * microcode is missing, since default clocks/voltages are insufficient.
 *
 * NOTE(review): this extract omits interleaved lines (braces, 'int r;',
 * error-check "if (r)" lines, returns); comments cover visible code only.
 */
5314 int evergreen_init(struct radeon_device *rdev)
/* Read the video BIOS; evergreen cannot operate without one. */
5319 if (!radeon_get_bios(rdev)) {
5320 if (ASIC_IS_AVIVO(rdev))
5323 /* Must be an ATOMBIOS */
5324 if (!rdev->is_atom_bios) {
5325 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
5328 r = radeon_atombios_init(rdev);
5331 /* reset the asic, the gfx blocks are often in a bad state
5332 * after the driver is unloaded or after a resume
5334 if (radeon_asic_reset(rdev))
5335 dev_warn(rdev->dev, "GPU reset failed !\n");
5336 /* Post card if necessary */
5337 if (!radeon_card_posted(rdev)) {
5339 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
5342 DRM_INFO("GPU not posted. posting now...\n");
5343 atom_asic_init(rdev->mode_info.atom_context);
5345 /* init golden registers */
5346 evergreen_init_golden_registers(rdev);
5347 /* Initialize scratch registers */
5348 r600_scratch_init(rdev);
5349 /* Initialize surface registers */
5350 radeon_surface_init(rdev);
5351 /* Initialize clocks */
5352 radeon_get_clock_info(rdev->ddev);
5354 r = radeon_fence_driver_init(rdev);
5357 /* initialize AGP */
5358 if (rdev->flags & RADEON_IS_AGP) {
5359 r = radeon_agp_init(rdev);
/* AGP init failure is non-fatal: fall back to PCI(e) operation */
5361 radeon_agp_disable(rdev);
5363 /* initialize memory controller */
5364 r = evergreen_mc_init(rdev);
5367 /* Memory manager */
5368 r = radeon_bo_init(rdev);
/* GFX ring: 1 MiB; ring_obj cleared so ring init allocates it */
5372 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
5373 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
/* async DMA ring: 64 KiB */
5375 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
5376 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
/* UVD ring is only set up when UVD firmware/BO init succeeds */
5378 r = radeon_uvd_init(rdev);
5380 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5381 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
/* interrupt handler (IH) ring: 64 KiB */
5385 rdev->ih.ring_obj = NULL;
5386 r600_ih_ring_init(rdev, 64 * 1024);
5388 r = r600_pcie_gart_init(rdev);
/* optimistically mark acceleration working; cleared below on failure */
5392 rdev->accel_working = true;
5393 r = evergreen_startup(rdev);
/* startup failed: unwind everything initialized above */
5395 dev_err(rdev->dev, "disabling GPU acceleration\n");
5397 r600_dma_fini(rdev);
5398 r600_irq_fini(rdev);
5399 if (rdev->flags & RADEON_IS_IGP)
5400 sumo_rlc_fini(rdev);
5401 radeon_wb_fini(rdev);
5402 radeon_ib_pool_fini(rdev);
5403 radeon_irq_kms_fini(rdev);
5404 evergreen_pcie_gart_fini(rdev);
5405 rdev->accel_working = false;
5408 /* Don't start up if the MC ucode is missing on BTC parts.
5409 * The default clocks and voltages before the MC ucode
5410 * is loaded are not suffient for advanced operations.
5412 if (ASIC_IS_DCE5(rdev)) {
5413 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
5414 DRM_ERROR("radeon: MC ucode required for NI+.\n");
/*
 * evergreen_fini - final driver teardown; mirrors evergreen_init in
 * reverse.  Stops/fini's every engine and subsystem (audio, blitter, DMA,
 * IRQ, RLC on IGP parts, writeback, IB pool, GART, UVD), then releases the
 * memory/fence/AGP/BO/atombios state, frees the loaded microcode, and
 * finally frees the BIOS image copy.
 *
 * NOTE(review): interleaved lines from the original (e.g. r700_cp_fini and
 * the closing brace) are not visible in this extract.
 */
5422 void evergreen_fini(struct radeon_device *rdev)
5424 r600_audio_fini(rdev);
5425 r600_blit_fini(rdev);
5427 r600_dma_fini(rdev);
5428 r600_irq_fini(rdev);
/* the RLC save/restore BOs only exist on IGP (fusion) parts */
5429 if (rdev->flags & RADEON_IS_IGP)
5430 sumo_rlc_fini(rdev);
5431 radeon_wb_fini(rdev);
5432 radeon_ib_pool_fini(rdev);
5433 radeon_irq_kms_fini(rdev);
5434 evergreen_pcie_gart_fini(rdev);
/* stop UVD before releasing its firmware/BO state */
5435 r600_uvd_stop(rdev);
5436 radeon_uvd_fini(rdev);
5437 r600_vram_scratch_fini(rdev);
5438 radeon_gem_fini(rdev);
5439 radeon_fence_driver_fini(rdev);
5440 radeon_agp_fini(rdev);
5441 radeon_bo_fini(rdev);
5442 radeon_atombios_fini(rdev);
/* DCE5 (NI) parts carry extra MC microcode released separately */
5443 if (ASIC_IS_DCE5(rdev))
5444 ni_fini_microcode(rdev);
5446 r600_fini_microcode(rdev);
5447 drm_free(rdev->bios, M_DRM);
/*
 * evergreen_pcie_gen2_enable - switch the PCIe link to gen2 speed when
 * both endpoints support it.
 *
 * Bails out early when gen2 is disabled via the radeon.pcie_gen2 module
 * parameter, on IGP parts, on non-PCIe cards, on X2 (dual-GPU) boards,
 * when the bridge does not advertise 5.0 GT/s, or when the link already
 * runs at gen2.  Otherwise it sequences the LC_SPEED_CNTL register writes
 * required to retrain the link at gen2.
 *
 * NOTE(review): interleaved lines from the original (returns, braces, the
 * else-branch structure around 5508-5513) are not visible in this extract.
 */
5451 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5453 u32 link_width_cntl, speed_cntl, mask;
/* module parameter radeon.pcie_gen2=0 disables the speed change */
5456 if (radeon_pcie_gen2 == 0)
5459 if (rdev->flags & RADEON_IS_IGP)
5462 if (!(rdev->flags & RADEON_IS_PCIE))
5465 /* x2 cards have a special sequence */
5466 if (ASIC_IS_X2(rdev))
/* query the bridge's supported link speeds */
5469 ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
5473 if (!(mask & DRM_PCIE_SPEED_50))
5476 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
/* LC_CURRENT_DATA_RATE set means the link is already at gen2 */
5477 if (speed_cntl & LC_CURRENT_DATA_RATE) {
5478 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5482 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
/* only retrain if the other side has ever advertised/sent gen2 */
5484 if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5485 (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
/* allow upconfigure so the link can renegotiate width/speed */
5487 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5488 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5489 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5491 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5492 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5493 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* pulse CLR_FAILED_SPD_CHANGE_CNT: set then clear to reset the counter */
5495 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5496 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5497 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5499 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5500 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5501 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* finally strap gen2 enable to trigger the speed change */
5503 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5504 speed_cntl |= LC_GEN2_EN_STRAP;
5505 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5508 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5509 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5511 link_width_cntl |= LC_UPCONFIGURE_DIS;
5513 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5514 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5518 void evergreen_program_aspm(struct radeon_device *rdev)
5521 u32 pcie_lc_cntl, pcie_lc_cntl_old;
5522 bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5523 /* fusion_platform = true
5524 * if the system is a fusion system
5525 * (APU or DGPU in a fusion system).
5526 * todo: check if the system is a fusion platform.
5528 bool fusion_platform = false;
5530 if (radeon_aspm == 0)
5533 if (!(rdev->flags & RADEON_IS_PCIE))
5536 switch (rdev->family) {
5549 disable_l0s = false;
5553 if (rdev->flags & RADEON_IS_IGP)
5554 fusion_platform = true; /* XXX also dGPUs in a fusion system */
5556 data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
5557 if (fusion_platform)
5562 WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
5564 data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
5565 if (fusion_platform)
5570 WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
5572 pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
5573 pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
5575 if (rdev->family >= CHIP_BARTS)
5576 pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
5578 pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
5582 if (rdev->family >= CHIP_BARTS)
5583 pcie_lc_cntl |= LC_L1_INACTIVITY(7);
5585 pcie_lc_cntl |= LC_L1_INACTIVITY(8);
5587 if (!disable_plloff_in_l1) {
5588 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5589 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5590 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5592 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5594 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5595 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5596 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5598 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5600 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5601 data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5602 data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5604 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5606 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5607 data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5608 data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5610 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
5612 if (rdev->family >= CHIP_BARTS) {
5613 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5614 data &= ~PLL_RAMP_UP_TIME_0_MASK;
5615 data |= PLL_RAMP_UP_TIME_0(4);
5617 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5619 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5620 data &= ~PLL_RAMP_UP_TIME_1_MASK;
5621 data |= PLL_RAMP_UP_TIME_1(4);
5623 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5625 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5626 data &= ~PLL_RAMP_UP_TIME_0_MASK;
5627 data |= PLL_RAMP_UP_TIME_0(4);
5629 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5631 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5632 data &= ~PLL_RAMP_UP_TIME_1_MASK;
5633 data |= PLL_RAMP_UP_TIME_1(4);
5635 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
5638 data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5639 data &= ~LC_DYN_LANES_PWR_STATE_MASK;
5640 data |= LC_DYN_LANES_PWR_STATE(3);
5642 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
5644 if (rdev->family >= CHIP_BARTS) {
5645 data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
5646 data &= ~LS2_EXIT_TIME_MASK;
5647 data |= LS2_EXIT_TIME(1);
5649 WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
5651 data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
5652 data &= ~LS2_EXIT_TIME_MASK;
5653 data |= LS2_EXIT_TIME(1);
5655 WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
5660 /* evergreen parts only */
5661 if (rdev->family < CHIP_BARTS)
5662 pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
5664 if (pcie_lc_cntl != pcie_lc_cntl_old)
5665 WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);