2 * Copyright 2012 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
26 #include "radeon_asic.h"
31 #include <linux/math64.h>
32 #include <linux/seq_file.h>
/* MC arbiter clock-gating frequency slot indices (register sets F0-F3) */
#define MC_CG_ARB_FREQ_F0 0x0a
#define MC_CG_ARB_FREQ_F1 0x0b
#define MC_CG_ARB_FREQ_F2 0x0c
#define MC_CG_ARB_FREQ_F3 0x0d

/* first address past SMC SRAM; used as the upper bound for SMC SRAM accesses */
#define SMC_RAM_END 0xC000
41 static const struct ni_cac_weights cac_weights_cayman_xt =
105 { 0, 0, 0, 0, 0, 0, 0, 0 },
110 static const struct ni_cac_weights cac_weights_cayman_pro =
174 { 0, 0, 0, 0, 0, 0, 0, 0 },
179 static const struct ni_cac_weights cac_weights_cayman_le =
243 { 0, 0, 0, 0, 0, 0, 0, 0 },
248 #define NISLANDS_MGCG_SEQUENCE 300
250 static const u32 cayman_cgcg_cgls_default[] =
252 0x000008f8, 0x00000010, 0xffffffff,
253 0x000008fc, 0x00000000, 0xffffffff,
254 0x000008f8, 0x00000011, 0xffffffff,
255 0x000008fc, 0x00000000, 0xffffffff,
256 0x000008f8, 0x00000012, 0xffffffff,
257 0x000008fc, 0x00000000, 0xffffffff,
258 0x000008f8, 0x00000013, 0xffffffff,
259 0x000008fc, 0x00000000, 0xffffffff,
260 0x000008f8, 0x00000014, 0xffffffff,
261 0x000008fc, 0x00000000, 0xffffffff,
262 0x000008f8, 0x00000015, 0xffffffff,
263 0x000008fc, 0x00000000, 0xffffffff,
264 0x000008f8, 0x00000016, 0xffffffff,
265 0x000008fc, 0x00000000, 0xffffffff,
266 0x000008f8, 0x00000017, 0xffffffff,
267 0x000008fc, 0x00000000, 0xffffffff,
268 0x000008f8, 0x00000018, 0xffffffff,
269 0x000008fc, 0x00000000, 0xffffffff,
270 0x000008f8, 0x00000019, 0xffffffff,
271 0x000008fc, 0x00000000, 0xffffffff,
272 0x000008f8, 0x0000001a, 0xffffffff,
273 0x000008fc, 0x00000000, 0xffffffff,
274 0x000008f8, 0x0000001b, 0xffffffff,
275 0x000008fc, 0x00000000, 0xffffffff,
276 0x000008f8, 0x00000020, 0xffffffff,
277 0x000008fc, 0x00000000, 0xffffffff,
278 0x000008f8, 0x00000021, 0xffffffff,
279 0x000008fc, 0x00000000, 0xffffffff,
280 0x000008f8, 0x00000022, 0xffffffff,
281 0x000008fc, 0x00000000, 0xffffffff,
282 0x000008f8, 0x00000023, 0xffffffff,
283 0x000008fc, 0x00000000, 0xffffffff,
284 0x000008f8, 0x00000024, 0xffffffff,
285 0x000008fc, 0x00000000, 0xffffffff,
286 0x000008f8, 0x00000025, 0xffffffff,
287 0x000008fc, 0x00000000, 0xffffffff,
288 0x000008f8, 0x00000026, 0xffffffff,
289 0x000008fc, 0x00000000, 0xffffffff,
290 0x000008f8, 0x00000027, 0xffffffff,
291 0x000008fc, 0x00000000, 0xffffffff,
292 0x000008f8, 0x00000028, 0xffffffff,
293 0x000008fc, 0x00000000, 0xffffffff,
294 0x000008f8, 0x00000029, 0xffffffff,
295 0x000008fc, 0x00000000, 0xffffffff,
296 0x000008f8, 0x0000002a, 0xffffffff,
297 0x000008fc, 0x00000000, 0xffffffff,
298 0x000008f8, 0x0000002b, 0xffffffff,
299 0x000008fc, 0x00000000, 0xffffffff
301 #define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32))
303 static const u32 cayman_cgcg_cgls_disable[] =
305 0x000008f8, 0x00000010, 0xffffffff,
306 0x000008fc, 0xffffffff, 0xffffffff,
307 0x000008f8, 0x00000011, 0xffffffff,
308 0x000008fc, 0xffffffff, 0xffffffff,
309 0x000008f8, 0x00000012, 0xffffffff,
310 0x000008fc, 0xffffffff, 0xffffffff,
311 0x000008f8, 0x00000013, 0xffffffff,
312 0x000008fc, 0xffffffff, 0xffffffff,
313 0x000008f8, 0x00000014, 0xffffffff,
314 0x000008fc, 0xffffffff, 0xffffffff,
315 0x000008f8, 0x00000015, 0xffffffff,
316 0x000008fc, 0xffffffff, 0xffffffff,
317 0x000008f8, 0x00000016, 0xffffffff,
318 0x000008fc, 0xffffffff, 0xffffffff,
319 0x000008f8, 0x00000017, 0xffffffff,
320 0x000008fc, 0xffffffff, 0xffffffff,
321 0x000008f8, 0x00000018, 0xffffffff,
322 0x000008fc, 0xffffffff, 0xffffffff,
323 0x000008f8, 0x00000019, 0xffffffff,
324 0x000008fc, 0xffffffff, 0xffffffff,
325 0x000008f8, 0x0000001a, 0xffffffff,
326 0x000008fc, 0xffffffff, 0xffffffff,
327 0x000008f8, 0x0000001b, 0xffffffff,
328 0x000008fc, 0xffffffff, 0xffffffff,
329 0x000008f8, 0x00000020, 0xffffffff,
330 0x000008fc, 0x00000000, 0xffffffff,
331 0x000008f8, 0x00000021, 0xffffffff,
332 0x000008fc, 0x00000000, 0xffffffff,
333 0x000008f8, 0x00000022, 0xffffffff,
334 0x000008fc, 0x00000000, 0xffffffff,
335 0x000008f8, 0x00000023, 0xffffffff,
336 0x000008fc, 0x00000000, 0xffffffff,
337 0x000008f8, 0x00000024, 0xffffffff,
338 0x000008fc, 0x00000000, 0xffffffff,
339 0x000008f8, 0x00000025, 0xffffffff,
340 0x000008fc, 0x00000000, 0xffffffff,
341 0x000008f8, 0x00000026, 0xffffffff,
342 0x000008fc, 0x00000000, 0xffffffff,
343 0x000008f8, 0x00000027, 0xffffffff,
344 0x000008fc, 0x00000000, 0xffffffff,
345 0x000008f8, 0x00000028, 0xffffffff,
346 0x000008fc, 0x00000000, 0xffffffff,
347 0x000008f8, 0x00000029, 0xffffffff,
348 0x000008fc, 0x00000000, 0xffffffff,
349 0x000008f8, 0x0000002a, 0xffffffff,
350 0x000008fc, 0x00000000, 0xffffffff,
351 0x000008f8, 0x0000002b, 0xffffffff,
352 0x000008fc, 0x00000000, 0xffffffff,
353 0x00000644, 0x000f7902, 0x001f4180,
354 0x00000644, 0x000f3802, 0x001f4180
356 #define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32))
358 static const u32 cayman_cgcg_cgls_enable[] =
360 0x00000644, 0x000f7882, 0x001f4080,
361 0x000008f8, 0x00000010, 0xffffffff,
362 0x000008fc, 0x00000000, 0xffffffff,
363 0x000008f8, 0x00000011, 0xffffffff,
364 0x000008fc, 0x00000000, 0xffffffff,
365 0x000008f8, 0x00000012, 0xffffffff,
366 0x000008fc, 0x00000000, 0xffffffff,
367 0x000008f8, 0x00000013, 0xffffffff,
368 0x000008fc, 0x00000000, 0xffffffff,
369 0x000008f8, 0x00000014, 0xffffffff,
370 0x000008fc, 0x00000000, 0xffffffff,
371 0x000008f8, 0x00000015, 0xffffffff,
372 0x000008fc, 0x00000000, 0xffffffff,
373 0x000008f8, 0x00000016, 0xffffffff,
374 0x000008fc, 0x00000000, 0xffffffff,
375 0x000008f8, 0x00000017, 0xffffffff,
376 0x000008fc, 0x00000000, 0xffffffff,
377 0x000008f8, 0x00000018, 0xffffffff,
378 0x000008fc, 0x00000000, 0xffffffff,
379 0x000008f8, 0x00000019, 0xffffffff,
380 0x000008fc, 0x00000000, 0xffffffff,
381 0x000008f8, 0x0000001a, 0xffffffff,
382 0x000008fc, 0x00000000, 0xffffffff,
383 0x000008f8, 0x0000001b, 0xffffffff,
384 0x000008fc, 0x00000000, 0xffffffff,
385 0x000008f8, 0x00000020, 0xffffffff,
386 0x000008fc, 0xffffffff, 0xffffffff,
387 0x000008f8, 0x00000021, 0xffffffff,
388 0x000008fc, 0xffffffff, 0xffffffff,
389 0x000008f8, 0x00000022, 0xffffffff,
390 0x000008fc, 0xffffffff, 0xffffffff,
391 0x000008f8, 0x00000023, 0xffffffff,
392 0x000008fc, 0xffffffff, 0xffffffff,
393 0x000008f8, 0x00000024, 0xffffffff,
394 0x000008fc, 0xffffffff, 0xffffffff,
395 0x000008f8, 0x00000025, 0xffffffff,
396 0x000008fc, 0xffffffff, 0xffffffff,
397 0x000008f8, 0x00000026, 0xffffffff,
398 0x000008fc, 0xffffffff, 0xffffffff,
399 0x000008f8, 0x00000027, 0xffffffff,
400 0x000008fc, 0xffffffff, 0xffffffff,
401 0x000008f8, 0x00000028, 0xffffffff,
402 0x000008fc, 0xffffffff, 0xffffffff,
403 0x000008f8, 0x00000029, 0xffffffff,
404 0x000008fc, 0xffffffff, 0xffffffff,
405 0x000008f8, 0x0000002a, 0xffffffff,
406 0x000008fc, 0xffffffff, 0xffffffff,
407 0x000008f8, 0x0000002b, 0xffffffff,
408 0x000008fc, 0xffffffff, 0xffffffff
410 #define CAYMAN_CGCG_CGLS_ENABLE_LENGTH sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32))
412 static const u32 cayman_mgcg_default[] =
414 0x0000802c, 0xc0000000, 0xffffffff,
415 0x00003fc4, 0xc0000000, 0xffffffff,
416 0x00005448, 0x00000100, 0xffffffff,
417 0x000055e4, 0x00000100, 0xffffffff,
418 0x0000160c, 0x00000100, 0xffffffff,
419 0x00008984, 0x06000100, 0xffffffff,
420 0x0000c164, 0x00000100, 0xffffffff,
421 0x00008a18, 0x00000100, 0xffffffff,
422 0x0000897c, 0x06000100, 0xffffffff,
423 0x00008b28, 0x00000100, 0xffffffff,
424 0x00009144, 0x00800200, 0xffffffff,
425 0x00009a60, 0x00000100, 0xffffffff,
426 0x00009868, 0x00000100, 0xffffffff,
427 0x00008d58, 0x00000100, 0xffffffff,
428 0x00009510, 0x00000100, 0xffffffff,
429 0x0000949c, 0x00000100, 0xffffffff,
430 0x00009654, 0x00000100, 0xffffffff,
431 0x00009030, 0x00000100, 0xffffffff,
432 0x00009034, 0x00000100, 0xffffffff,
433 0x00009038, 0x00000100, 0xffffffff,
434 0x0000903c, 0x00000100, 0xffffffff,
435 0x00009040, 0x00000100, 0xffffffff,
436 0x0000a200, 0x00000100, 0xffffffff,
437 0x0000a204, 0x00000100, 0xffffffff,
438 0x0000a208, 0x00000100, 0xffffffff,
439 0x0000a20c, 0x00000100, 0xffffffff,
440 0x00009744, 0x00000100, 0xffffffff,
441 0x00003f80, 0x00000100, 0xffffffff,
442 0x0000a210, 0x00000100, 0xffffffff,
443 0x0000a214, 0x00000100, 0xffffffff,
444 0x000004d8, 0x00000100, 0xffffffff,
445 0x00009664, 0x00000100, 0xffffffff,
446 0x00009698, 0x00000100, 0xffffffff,
447 0x000004d4, 0x00000200, 0xffffffff,
448 0x000004d0, 0x00000000, 0xffffffff,
449 0x000030cc, 0x00000104, 0xffffffff,
450 0x0000d0c0, 0x00000100, 0xffffffff,
451 0x0000d8c0, 0x00000100, 0xffffffff,
452 0x0000802c, 0x40000000, 0xffffffff,
453 0x00003fc4, 0x40000000, 0xffffffff,
454 0x0000915c, 0x00010000, 0xffffffff,
455 0x00009160, 0x00030002, 0xffffffff,
456 0x00009164, 0x00050004, 0xffffffff,
457 0x00009168, 0x00070006, 0xffffffff,
458 0x00009178, 0x00070000, 0xffffffff,
459 0x0000917c, 0x00030002, 0xffffffff,
460 0x00009180, 0x00050004, 0xffffffff,
461 0x0000918c, 0x00010006, 0xffffffff,
462 0x00009190, 0x00090008, 0xffffffff,
463 0x00009194, 0x00070000, 0xffffffff,
464 0x00009198, 0x00030002, 0xffffffff,
465 0x0000919c, 0x00050004, 0xffffffff,
466 0x000091a8, 0x00010006, 0xffffffff,
467 0x000091ac, 0x00090008, 0xffffffff,
468 0x000091b0, 0x00070000, 0xffffffff,
469 0x000091b4, 0x00030002, 0xffffffff,
470 0x000091b8, 0x00050004, 0xffffffff,
471 0x000091c4, 0x00010006, 0xffffffff,
472 0x000091c8, 0x00090008, 0xffffffff,
473 0x000091cc, 0x00070000, 0xffffffff,
474 0x000091d0, 0x00030002, 0xffffffff,
475 0x000091d4, 0x00050004, 0xffffffff,
476 0x000091e0, 0x00010006, 0xffffffff,
477 0x000091e4, 0x00090008, 0xffffffff,
478 0x000091e8, 0x00000000, 0xffffffff,
479 0x000091ec, 0x00070000, 0xffffffff,
480 0x000091f0, 0x00030002, 0xffffffff,
481 0x000091f4, 0x00050004, 0xffffffff,
482 0x00009200, 0x00010006, 0xffffffff,
483 0x00009204, 0x00090008, 0xffffffff,
484 0x00009208, 0x00070000, 0xffffffff,
485 0x0000920c, 0x00030002, 0xffffffff,
486 0x00009210, 0x00050004, 0xffffffff,
487 0x0000921c, 0x00010006, 0xffffffff,
488 0x00009220, 0x00090008, 0xffffffff,
489 0x00009224, 0x00070000, 0xffffffff,
490 0x00009228, 0x00030002, 0xffffffff,
491 0x0000922c, 0x00050004, 0xffffffff,
492 0x00009238, 0x00010006, 0xffffffff,
493 0x0000923c, 0x00090008, 0xffffffff,
494 0x00009240, 0x00070000, 0xffffffff,
495 0x00009244, 0x00030002, 0xffffffff,
496 0x00009248, 0x00050004, 0xffffffff,
497 0x00009254, 0x00010006, 0xffffffff,
498 0x00009258, 0x00090008, 0xffffffff,
499 0x0000925c, 0x00070000, 0xffffffff,
500 0x00009260, 0x00030002, 0xffffffff,
501 0x00009264, 0x00050004, 0xffffffff,
502 0x00009270, 0x00010006, 0xffffffff,
503 0x00009274, 0x00090008, 0xffffffff,
504 0x00009278, 0x00070000, 0xffffffff,
505 0x0000927c, 0x00030002, 0xffffffff,
506 0x00009280, 0x00050004, 0xffffffff,
507 0x0000928c, 0x00010006, 0xffffffff,
508 0x00009290, 0x00090008, 0xffffffff,
509 0x000092a8, 0x00070000, 0xffffffff,
510 0x000092ac, 0x00030002, 0xffffffff,
511 0x000092b0, 0x00050004, 0xffffffff,
512 0x000092bc, 0x00010006, 0xffffffff,
513 0x000092c0, 0x00090008, 0xffffffff,
514 0x000092c4, 0x00070000, 0xffffffff,
515 0x000092c8, 0x00030002, 0xffffffff,
516 0x000092cc, 0x00050004, 0xffffffff,
517 0x000092d8, 0x00010006, 0xffffffff,
518 0x000092dc, 0x00090008, 0xffffffff,
519 0x00009294, 0x00000000, 0xffffffff,
520 0x0000802c, 0x40010000, 0xffffffff,
521 0x00003fc4, 0x40010000, 0xffffffff,
522 0x0000915c, 0x00010000, 0xffffffff,
523 0x00009160, 0x00030002, 0xffffffff,
524 0x00009164, 0x00050004, 0xffffffff,
525 0x00009168, 0x00070006, 0xffffffff,
526 0x00009178, 0x00070000, 0xffffffff,
527 0x0000917c, 0x00030002, 0xffffffff,
528 0x00009180, 0x00050004, 0xffffffff,
529 0x0000918c, 0x00010006, 0xffffffff,
530 0x00009190, 0x00090008, 0xffffffff,
531 0x00009194, 0x00070000, 0xffffffff,
532 0x00009198, 0x00030002, 0xffffffff,
533 0x0000919c, 0x00050004, 0xffffffff,
534 0x000091a8, 0x00010006, 0xffffffff,
535 0x000091ac, 0x00090008, 0xffffffff,
536 0x000091b0, 0x00070000, 0xffffffff,
537 0x000091b4, 0x00030002, 0xffffffff,
538 0x000091b8, 0x00050004, 0xffffffff,
539 0x000091c4, 0x00010006, 0xffffffff,
540 0x000091c8, 0x00090008, 0xffffffff,
541 0x000091cc, 0x00070000, 0xffffffff,
542 0x000091d0, 0x00030002, 0xffffffff,
543 0x000091d4, 0x00050004, 0xffffffff,
544 0x000091e0, 0x00010006, 0xffffffff,
545 0x000091e4, 0x00090008, 0xffffffff,
546 0x000091e8, 0x00000000, 0xffffffff,
547 0x000091ec, 0x00070000, 0xffffffff,
548 0x000091f0, 0x00030002, 0xffffffff,
549 0x000091f4, 0x00050004, 0xffffffff,
550 0x00009200, 0x00010006, 0xffffffff,
551 0x00009204, 0x00090008, 0xffffffff,
552 0x00009208, 0x00070000, 0xffffffff,
553 0x0000920c, 0x00030002, 0xffffffff,
554 0x00009210, 0x00050004, 0xffffffff,
555 0x0000921c, 0x00010006, 0xffffffff,
556 0x00009220, 0x00090008, 0xffffffff,
557 0x00009224, 0x00070000, 0xffffffff,
558 0x00009228, 0x00030002, 0xffffffff,
559 0x0000922c, 0x00050004, 0xffffffff,
560 0x00009238, 0x00010006, 0xffffffff,
561 0x0000923c, 0x00090008, 0xffffffff,
562 0x00009240, 0x00070000, 0xffffffff,
563 0x00009244, 0x00030002, 0xffffffff,
564 0x00009248, 0x00050004, 0xffffffff,
565 0x00009254, 0x00010006, 0xffffffff,
566 0x00009258, 0x00090008, 0xffffffff,
567 0x0000925c, 0x00070000, 0xffffffff,
568 0x00009260, 0x00030002, 0xffffffff,
569 0x00009264, 0x00050004, 0xffffffff,
570 0x00009270, 0x00010006, 0xffffffff,
571 0x00009274, 0x00090008, 0xffffffff,
572 0x00009278, 0x00070000, 0xffffffff,
573 0x0000927c, 0x00030002, 0xffffffff,
574 0x00009280, 0x00050004, 0xffffffff,
575 0x0000928c, 0x00010006, 0xffffffff,
576 0x00009290, 0x00090008, 0xffffffff,
577 0x000092a8, 0x00070000, 0xffffffff,
578 0x000092ac, 0x00030002, 0xffffffff,
579 0x000092b0, 0x00050004, 0xffffffff,
580 0x000092bc, 0x00010006, 0xffffffff,
581 0x000092c0, 0x00090008, 0xffffffff,
582 0x000092c4, 0x00070000, 0xffffffff,
583 0x000092c8, 0x00030002, 0xffffffff,
584 0x000092cc, 0x00050004, 0xffffffff,
585 0x000092d8, 0x00010006, 0xffffffff,
586 0x000092dc, 0x00090008, 0xffffffff,
587 0x00009294, 0x00000000, 0xffffffff,
588 0x0000802c, 0xc0000000, 0xffffffff,
589 0x00003fc4, 0xc0000000, 0xffffffff,
590 0x000008f8, 0x00000010, 0xffffffff,
591 0x000008fc, 0x00000000, 0xffffffff,
592 0x000008f8, 0x00000011, 0xffffffff,
593 0x000008fc, 0x00000000, 0xffffffff,
594 0x000008f8, 0x00000012, 0xffffffff,
595 0x000008fc, 0x00000000, 0xffffffff,
596 0x000008f8, 0x00000013, 0xffffffff,
597 0x000008fc, 0x00000000, 0xffffffff,
598 0x000008f8, 0x00000014, 0xffffffff,
599 0x000008fc, 0x00000000, 0xffffffff,
600 0x000008f8, 0x00000015, 0xffffffff,
601 0x000008fc, 0x00000000, 0xffffffff,
602 0x000008f8, 0x00000016, 0xffffffff,
603 0x000008fc, 0x00000000, 0xffffffff,
604 0x000008f8, 0x00000017, 0xffffffff,
605 0x000008fc, 0x00000000, 0xffffffff,
606 0x000008f8, 0x00000018, 0xffffffff,
607 0x000008fc, 0x00000000, 0xffffffff,
608 0x000008f8, 0x00000019, 0xffffffff,
609 0x000008fc, 0x00000000, 0xffffffff,
610 0x000008f8, 0x0000001a, 0xffffffff,
611 0x000008fc, 0x00000000, 0xffffffff,
612 0x000008f8, 0x0000001b, 0xffffffff,
613 0x000008fc, 0x00000000, 0xffffffff
615 #define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32))
617 static const u32 cayman_mgcg_disable[] =
619 0x0000802c, 0xc0000000, 0xffffffff,
620 0x000008f8, 0x00000000, 0xffffffff,
621 0x000008fc, 0xffffffff, 0xffffffff,
622 0x000008f8, 0x00000001, 0xffffffff,
623 0x000008fc, 0xffffffff, 0xffffffff,
624 0x000008f8, 0x00000002, 0xffffffff,
625 0x000008fc, 0xffffffff, 0xffffffff,
626 0x000008f8, 0x00000003, 0xffffffff,
627 0x000008fc, 0xffffffff, 0xffffffff,
628 0x00009150, 0x00600000, 0xffffffff
630 #define CAYMAN_MGCG_DISABLE_LENGTH sizeof(cayman_mgcg_disable) / (3 * sizeof(u32))
632 static const u32 cayman_mgcg_enable[] =
634 0x0000802c, 0xc0000000, 0xffffffff,
635 0x000008f8, 0x00000000, 0xffffffff,
636 0x000008fc, 0x00000000, 0xffffffff,
637 0x000008f8, 0x00000001, 0xffffffff,
638 0x000008fc, 0x00000000, 0xffffffff,
639 0x000008f8, 0x00000002, 0xffffffff,
640 0x000008fc, 0x00600000, 0xffffffff,
641 0x000008f8, 0x00000003, 0xffffffff,
642 0x000008fc, 0x00000000, 0xffffffff,
643 0x00009150, 0x96944200, 0xffffffff
646 #define CAYMAN_MGCG_ENABLE_LENGTH sizeof(cayman_mgcg_enable) / (3 * sizeof(u32))
648 #define NISLANDS_SYSLS_SEQUENCE 100
650 static const u32 cayman_sysls_default[] =
652 /* Register, Value, Mask bits */
653 0x000055e8, 0x00000000, 0xffffffff,
654 0x0000d0bc, 0x00000000, 0xffffffff,
655 0x0000d8bc, 0x00000000, 0xffffffff,
656 0x000015c0, 0x000c1401, 0xffffffff,
657 0x0000264c, 0x000c0400, 0xffffffff,
658 0x00002648, 0x000c0400, 0xffffffff,
659 0x00002650, 0x000c0400, 0xffffffff,
660 0x000020b8, 0x000c0400, 0xffffffff,
661 0x000020bc, 0x000c0400, 0xffffffff,
662 0x000020c0, 0x000c0c80, 0xffffffff,
663 0x0000f4a0, 0x000000c0, 0xffffffff,
664 0x0000f4a4, 0x00680fff, 0xffffffff,
665 0x00002f50, 0x00000404, 0xffffffff,
666 0x000004c8, 0x00000001, 0xffffffff,
667 0x000064ec, 0x00000000, 0xffffffff,
668 0x00000c7c, 0x00000000, 0xffffffff,
669 0x00008dfc, 0x00000000, 0xffffffff
671 #define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32))
673 static const u32 cayman_sysls_disable[] =
675 /* Register, Value, Mask bits */
676 0x0000d0c0, 0x00000000, 0xffffffff,
677 0x0000d8c0, 0x00000000, 0xffffffff,
678 0x000055e8, 0x00000000, 0xffffffff,
679 0x0000d0bc, 0x00000000, 0xffffffff,
680 0x0000d8bc, 0x00000000, 0xffffffff,
681 0x000015c0, 0x00041401, 0xffffffff,
682 0x0000264c, 0x00040400, 0xffffffff,
683 0x00002648, 0x00040400, 0xffffffff,
684 0x00002650, 0x00040400, 0xffffffff,
685 0x000020b8, 0x00040400, 0xffffffff,
686 0x000020bc, 0x00040400, 0xffffffff,
687 0x000020c0, 0x00040c80, 0xffffffff,
688 0x0000f4a0, 0x000000c0, 0xffffffff,
689 0x0000f4a4, 0x00680000, 0xffffffff,
690 0x00002f50, 0x00000404, 0xffffffff,
691 0x000004c8, 0x00000001, 0xffffffff,
692 0x000064ec, 0x00007ffd, 0xffffffff,
693 0x00000c7c, 0x0000ff00, 0xffffffff,
694 0x00008dfc, 0x0000007f, 0xffffffff
696 #define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32))
698 static const u32 cayman_sysls_enable[] =
700 /* Register, Value, Mask bits */
701 0x000055e8, 0x00000001, 0xffffffff,
702 0x0000d0bc, 0x00000100, 0xffffffff,
703 0x0000d8bc, 0x00000100, 0xffffffff,
704 0x000015c0, 0x000c1401, 0xffffffff,
705 0x0000264c, 0x000c0400, 0xffffffff,
706 0x00002648, 0x000c0400, 0xffffffff,
707 0x00002650, 0x000c0400, 0xffffffff,
708 0x000020b8, 0x000c0400, 0xffffffff,
709 0x000020bc, 0x000c0400, 0xffffffff,
710 0x000020c0, 0x000c0c80, 0xffffffff,
711 0x0000f4a0, 0x000000c0, 0xffffffff,
712 0x0000f4a4, 0x00680fff, 0xffffffff,
713 0x00002f50, 0x00000903, 0xffffffff,
714 0x000004c8, 0x00000000, 0xffffffff,
715 0x000064ec, 0x00000000, 0xffffffff,
716 0x00000c7c, 0x00000000, 0xffffffff,
717 0x00008dfc, 0x00000000, 0xffffffff
719 #define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32))
/* Per-family power-info accessors (defined in rv770/evergreen dpm files or below). */
struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
struct ni_power_info *ni_get_pi(struct radeon_device *rdev);
struct ni_ps *ni_get_ps(struct radeon_ps *rps);
void ni_dpm_reset_asic(struct radeon_device *rdev);
727 struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
729 struct ni_power_info *pi = rdev->pm.dpm.priv;
734 struct ni_ps *ni_get_ps(struct radeon_ps *rps)
736 struct ni_ps *ps = rps->ps_priv;
/*
 * Evaluate the leakage-power model in DRM 32.32 fixed point:
 *   leakage = I_leak * kt * kv * V
 * where kt = (at/1000) * exp((bt/1000) * T)
 *   and kv = (av/1000) * exp((bv/1000) * V).
 * Inputs are in milli-units (divided by 1000 on entry); the result is
 * scaled back up by 1000 before conversion to an integer in *leakage.
 * NOTE(review): parameter list partly elided from this view and
 * reconstructed (v/t/ileakage types) — confirm against the full file.
 */
static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
						     u16 v, s16 t,
						     u32 ileakage,
						     u32 *leakage)
{
	s64 kt, kv, leakage_w, i_leakage, vddc, temperature;

	/* convert milli-units into fixed-point whole units */
	i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
	vddc = div64_s64(drm_int2fixp(v), 1000);
	temperature = div64_s64(drm_int2fixp(t), 1000);

	/* temperature- and voltage-dependent scale factors */
	kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
	kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));

	leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);

	/* scale back to milli-units before truncating to integer */
	*leakage = drm_fixp2int(leakage_w * 1000);
}
/*
 * Thin wrapper that forwards to the coefficient-based leakage formula.
 * NOTE(review): parameter list and braces elided from this view and
 * reconstructed — confirm against the full file.
 */
static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
					     const struct ni_leakage_coeffients *coeff,
					     u16 v, s16 t,
					     u32 i_leakage,
					     u32 *leakage)
{
	ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
}
772 bool ni_dpm_vblank_too_short(struct radeon_device *rdev)
774 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
775 u32 vblank_time = r600_dpm_get_vblank_time(rdev);
776 u32 switch_limit = pi->mem_gddr5 ? 450 : 300;
778 if (vblank_time < switch_limit)
/*
 * Clamp and massage a requested power state: cap every performance level
 * to the applicable AC/DC limits, keep clocks/voltages monotonic across
 * levels, pin a single mclk/vddci across all levels when mclk switching
 * must be disabled (multiple active CRTCs or vblank too short), and apply
 * the btc voltage dependency/delta rules.  Finally record whether the
 * state is DC-compatible and strip PCIe gen2 where vddc is too low.
 * NOTE(review): local declarations, else-branches and closing braces were
 * elided from this view and have been reconstructed.
 */
static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
					struct radeon_ps *rps)
{
	struct ni_ps *ps = ni_get_ps(rps);
	struct radeon_clock_and_voltage_limits *max_limits;
	bool disable_mclk_switching;
	u32 mclk, sclk;
	u16 vddc, vddci;
	int i;

	/* mclk switching mid-scanout is unsafe with >1 CRTC or a short vblank */
	if ((rdev->pm.dpm.new_active_crtc_count > 1) ||
	    ni_dpm_vblank_too_short(rdev))
		disable_mclk_switching = true;
	else
		disable_mclk_switching = false;

	if (rdev->pm.dpm.ac_power)
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
	else
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;

	/* on battery, hard-clamp each level to the DC limits */
	if (rdev->pm.dpm.ac_power == false) {
		for (i = 0; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk > max_limits->mclk)
				ps->performance_levels[i].mclk = max_limits->mclk;
			if (ps->performance_levels[i].sclk > max_limits->sclk)
				ps->performance_levels[i].sclk = max_limits->sclk;
			if (ps->performance_levels[i].vddc > max_limits->vddc)
				ps->performance_levels[i].vddc = max_limits->vddc;
			if (ps->performance_levels[i].vddci > max_limits->vddci)
				ps->performance_levels[i].vddci = max_limits->vddci;
		}
	}

	/* XXX validate the min clocks required for display */

	if (disable_mclk_switching) {
		/* no switching: take the highest level's mclk/vddci for the low state */
		mclk = ps->performance_levels[ps->performance_level_count - 1].mclk;
		sclk = ps->performance_levels[0].sclk;
		vddc = ps->performance_levels[0].vddc;
		vddci = ps->performance_levels[ps->performance_level_count - 1].vddci;
	} else {
		sclk = ps->performance_levels[0].sclk;
		mclk = ps->performance_levels[0].mclk;
		vddc = ps->performance_levels[0].vddc;
		vddci = ps->performance_levels[0].vddci;
	}

	/* adjusted low state */
	ps->performance_levels[0].sclk = sclk;
	ps->performance_levels[0].mclk = mclk;
	ps->performance_levels[0].vddc = vddc;
	ps->performance_levels[0].vddci = vddci;

	btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
				  &ps->performance_levels[0].sclk,
				  &ps->performance_levels[0].mclk);

	/* sclk/vddc must be non-decreasing from level to level */
	for (i = 1; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
			ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
		if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
			ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
	}

	if (disable_mclk_switching) {
		/* pin every level to the single highest mclk and its vddci */
		mclk = ps->performance_levels[0].mclk;
		for (i = 1; i < ps->performance_level_count; i++) {
			if (mclk < ps->performance_levels[i].mclk)
				mclk = ps->performance_levels[i].mclk;
		}
		for (i = 0; i < ps->performance_level_count; i++) {
			ps->performance_levels[i].mclk = mclk;
			ps->performance_levels[i].vddci = vddci;
		}
	} else {
		/* mclk/vddci must be non-decreasing from level to level */
		for (i = 1; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
				ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
			if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
				ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
		}
	}

	for (i = 1; i < ps->performance_level_count; i++)
		btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
					  &ps->performance_levels[i].sclk,
					  &ps->performance_levels[i].mclk);

	for (i = 0; i < ps->performance_level_count; i++)
		btc_adjust_clock_combinations(rdev, max_limits,
					      &ps->performance_levels[i]);

	/* raise voltages as required by the sclk/mclk/dispclk dependency tables */
	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
						   ps->performance_levels[i].sclk,
						   max_limits->vddc, &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddci, &ps->performance_levels[i].vddci);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddc, &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
						   rdev->clock.current_dispclk,
						   max_limits->vddc, &ps->performance_levels[i].vddc);
	}

	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_delta_rules(rdev,
					      max_limits->vddc, max_limits->vddci,
					      &ps->performance_levels[i].vddc,
					      &ps->performance_levels[i].vddci);
	}

	/* flag states whose vddc exceeds the DC limit, strip gen2 below min vddc */
	ps->dc_compatible = true;
	for (i = 0; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
			ps->dc_compatible = false;

		if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
			ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
	}
}
910 static void ni_cg_clockgating_default(struct radeon_device *rdev)
913 const u32 *ps = NULL;
915 ps = (const u32 *)&cayman_cgcg_cgls_default;
916 count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
918 btc_program_mgcg_hw_sequence(rdev, ps, count);
921 static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
925 const u32 *ps = NULL;
928 ps = (const u32 *)&cayman_cgcg_cgls_enable;
929 count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
931 ps = (const u32 *)&cayman_cgcg_cgls_disable;
932 count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
935 btc_program_mgcg_hw_sequence(rdev, ps, count);
938 static void ni_mg_clockgating_default(struct radeon_device *rdev)
941 const u32 *ps = NULL;
943 ps = (const u32 *)&cayman_mgcg_default;
944 count = CAYMAN_MGCG_DEFAULT_LENGTH;
946 btc_program_mgcg_hw_sequence(rdev, ps, count);
949 static void ni_mg_clockgating_enable(struct radeon_device *rdev,
953 const u32 *ps = NULL;
956 ps = (const u32 *)&cayman_mgcg_enable;
957 count = CAYMAN_MGCG_ENABLE_LENGTH;
959 ps = (const u32 *)&cayman_mgcg_disable;
960 count = CAYMAN_MGCG_DISABLE_LENGTH;
963 btc_program_mgcg_hw_sequence(rdev, ps, count);
966 static void ni_ls_clockgating_default(struct radeon_device *rdev)
969 const u32 *ps = NULL;
971 ps = (const u32 *)&cayman_sysls_default;
972 count = CAYMAN_SYSLS_DEFAULT_LENGTH;
974 btc_program_mgcg_hw_sequence(rdev, ps, count);
977 static void ni_ls_clockgating_enable(struct radeon_device *rdev,
981 const u32 *ps = NULL;
984 ps = (const u32 *)&cayman_sysls_enable;
985 count = CAYMAN_SYSLS_ENABLE_LENGTH;
987 ps = (const u32 *)&cayman_sysls_disable;
988 count = CAYMAN_SYSLS_DISABLE_LENGTH;
991 btc_program_mgcg_hw_sequence(rdev, ps, count);
/*
 * Replace the 0xff01 leakage-placeholder voltage entries in a clock/voltage
 * dependency table with the board's real max vddc.
 * Returns 0 on success, -EINVAL if a placeholder is found but max_vddc is
 * unknown (zero).
 * NOTE(review): surrounding braces/declarations elided from this view and
 * reconstructed — confirm against the full file.
 */
static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
							     struct radeon_clock_voltage_dependency_table *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 i;

	if (table) {
		for (i = 0; i < table->count; i++) {
			if (0xff01 == table->entries[i].v) {
				if (pi->max_vddc == 0)
					return -EINVAL;
				table->entries[i].v = pi->max_vddc;
			}
		}
	}
	return 0;
}
1013 static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
1017 ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1018 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
1020 ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1021 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
/* Disable the global dynamic power management engine. */
static void ni_stop_dpm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
}
/*
 * Notify the SMC that the board is running on AC power.
 * Returns 0 on success (or on DC, where no message is sent), -EINVAL if
 * the SMC rejects the message.
 * NOTE(review): the 'ac_power' parameter and the DC no-op path were elided
 * from this view and reconstructed — confirm against the full file.
 */
static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
					bool ac_power)
{
	if (ac_power)
		return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
			0 : -EINVAL;

	return 0;
}
/* Stage 'parameter' in the SMC scratch register, then issue 'msg' to the SMC. */
static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
						      PPSMC_Msg msg, u32 parameter)
{
	WREG32(SMC_SCRATCH0, parameter);
	return rv770_send_msg_to_smc(rdev, msg);
}
/*
 * Clear any forced level and restrict the SMC to a single enabled
 * performance level ahead of a state switch.
 * Returns 0 on success, -EINVAL if either SMC message fails.
 */
static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
{
	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
		return -EINVAL;

	return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
		0 : -EINVAL;
}
/*
 * Force the SMC to the high or low performance level, or return it to
 * automatic level selection, then record the new forced level.
 * Returns 0 on success, -EINVAL if any SMC message fails.
 * NOTE(review): the -EINVAL returns and closing braces were elided from
 * this view and reconstructed — confirm against the full file.
 */
int ni_dpm_force_performance_level(struct radeon_device *rdev,
				   enum radeon_dpm_forced_level level)
{
	if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
			return -EINVAL;

		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK)
			return -EINVAL;
	} else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
			return -EINVAL;

		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK)
			return -EINVAL;
	} else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) {
		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
			return -EINVAL;

		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
			return -EINVAL;
	}

	rdev->pm.dpm.forced_level = level;

	return 0;
}
/*
 * Halt the SMC: poll LB_SYNC_RESET_SEL (bounded by usec_timeout) until it
 * leaves the reset-selected state, settle briefly, then stop the SMC core.
 * NOTE(review): the poll-loop body and delays were elided from this view
 * and reconstructed — confirm against the full file.
 */
static void ni_stop_smc(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	for (i = 0; i < rdev->usec_timeout; i++) {
		tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
		if (tmp != 1)
			break;
		udelay(1);
	}

	udelay(100);

	r7xx_stop_smc(rdev);
}
/*
 * ni_process_firmware_header() - locate SMC data tables from the firmware header.
 * Reads one dword per table from the SMC SRAM firmware header and caches the
 * low 16 bits of each offset in the per-family power-info structs for later
 * table uploads (state, soft regs, MC regs, fan, arb, CAC, SPLL).
 * NOTE(review): the `if (ret) return ret;` checks after each read are elided
 * in this listing.
 */
1103 static int ni_process_firmware_header(struct radeon_device *rdev)
1105 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1106 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1107 struct ni_power_info *ni_pi = ni_get_pi(rdev);
/* State table offset -> rv7xx info (shared with common code paths). */
1111 ret = rv770_read_smc_sram_dword(rdev,
1112 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1113 NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
1114 &tmp, pi->sram_end);
1119 pi->state_table_start = (u16)tmp;
/* Soft-register block offset. */
1121 ret = rv770_read_smc_sram_dword(rdev,
1122 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1123 NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
1124 &tmp, pi->sram_end);
1129 pi->soft_regs_start = (u16)tmp;
/* MC register table offset -> evergreen info. */
1131 ret = rv770_read_smc_sram_dword(rdev,
1132 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1133 NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
1134 &tmp, pi->sram_end);
1139 eg_pi->mc_reg_table_start = (u16)tmp;
/* Fan table offset -> NI-specific info. */
1141 ret = rv770_read_smc_sram_dword(rdev,
1142 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1143 NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
1144 &tmp, pi->sram_end);
1149 ni_pi->fan_table_start = (u16)tmp;
/* MC arbiter DRAM auto-refresh table offset. */
1151 ret = rv770_read_smc_sram_dword(rdev,
1152 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1153 NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
1154 &tmp, pi->sram_end);
1159 ni_pi->arb_table_start = (u16)tmp;
/* CAC (power/leakage) table offset. */
1161 ret = rv770_read_smc_sram_dword(rdev,
1162 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1163 NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
1164 &tmp, pi->sram_end);
1169 ni_pi->cac_table_start = (u16)tmp;
/* SPLL divider table offset (consumed by ni_init_smc_spll_table()). */
1171 ret = rv770_read_smc_sram_dword(rdev,
1172 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1173 NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
1174 &tmp, pi->sram_end);
1179 ni_pi->spll_table_start = (u16)tmp;
/*
 * ni_read_clock_registers() - snapshot the boot-time PLL/clock register state.
 * Caches SPLL (engine clock), MPLL AD/DQ (memory clock), MCLK power
 * management, DLL and spread-spectrum registers so later SMC state-table
 * builders (initial/ACPI states, sclk/mclk value population) can start from
 * the values programmed by the VBIOS.
 */
1185 static void ni_read_clock_registers(struct radeon_device *rdev)
1187 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1189 ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
1190 ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
1191 ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
1192 ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
1193 ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM)
1194 ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
1195 ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
1196 ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
1197 ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
1198 ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
1199 ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
1200 ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
1201 ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
1202 ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
/*
 * ni_enter_ulp_state() - ask the SMC to enter ultra-low-power state.
 * When GFX clock gating is enabled, first disables dynamic gating and pulses
 * GFX_CLK_FORCE_ON (set then clear) so the engine clock is stable before the
 * message; the RREG32(GB_ADDR_CONFIG) read acts as a posting/flush read.
 * Then writes the SwitchToMinimumPower message into SMC_MSG.
 */
1206 static int ni_enter_ulp_state(struct radeon_device *rdev)
1208 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1210 if (pi->gfx_clock_gating) {
1211 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1212 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1213 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
/* Posting read to make sure the force-on/off sequence has landed. */
1214 RREG32(GB_ADDR_CONFIG);
/* Hand the low-power request to the SMC via the message mailbox. */
1217 WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1218 ~HOST_SMC_MSG_MASK);
/*
 * ni_program_response_times() - program SMC soft registers with delay values.
 * Converts voltage/backbias response times, an ACPI delay and a VBI timeout
 * into SMC delay units derived from the reference clock, plus a fixed MCLK
 * switch limit, and writes them to the SMC soft-register block.
 * NOTE(review): the /1600 and /100 scale factors are SMC-unit conversions
 * whose derivation is not visible here — verify against the SMC firmware
 * interface before changing them.
 */
1226 static void ni_program_response_times(struct radeon_device *rdev)
1228 u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
1229 u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
1230 u32 reference_clock;
1232 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
1234 voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1235 backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
/* Fall back to 1000 (presumably us) when the platform supplies no value. */
1237 if (voltage_response_time == 0)
1238 voltage_response_time = 1000;
1240 if (backbias_response_time == 0)
1241 backbias_response_time = 1000;
/* Fixed ACPI transition delay and VBI timeout. */
1243 acpi_delay_time = 15000;
1244 vbi_time_out = 100000;
1246 reference_clock = radeon_get_xclk(rdev);
/* Convert each time into SMC clock units. */
1248 vddc_dly = (voltage_response_time * reference_clock) / 1600;
1249 bb_dly = (backbias_response_time * reference_clock) / 1600;
1250 acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1251 vbi_dly = (vbi_time_out * reference_clock) / 1600;
1253 mclk_switch_limit = (460 * reference_clock) / 100;
1255 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg, vddc_dly);
1256 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1257 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi, acpi_dly);
1258 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1259 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
1260 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
/*
 * ni_populate_smc_voltage_table() - copy one voltage table's SMIO settings
 * into the SMC state table. High SMIO is cleared; the low SMIO pattern for
 * each entry is OR-merged (big-endian) so multiple voltage rails can share
 * the same lowSMIO slots.
 */
1263 static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
1264 struct atom_voltage_table *voltage_table,
1265 NISLANDS_SMC_STATETABLE *table)
1269 for (i = 0; i < voltage_table->count; i++) {
1270 table->highSMIO[i] = 0;
1271 table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1275 static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
1276 NISLANDS_SMC_STATETABLE *table)
1278 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1279 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1282 if (eg_pi->vddc_voltage_table.count) {
1283 ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
1284 table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
1285 table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
1286 cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1288 for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1289 if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
1290 table->maxVDDCIndexInPPTable = i;
1296 if (eg_pi->vddci_voltage_table.count) {
1297 ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
1299 table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
1300 table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1301 cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
/*
 * ni_populate_voltage_value() - map a raw voltage to an SMC voltage entry.
 * Linear scan for the first table entry whose value is >= the requested
 * value; stores its index and (big-endian) value into *voltage.
 * NOTE(review): the break after a match and the -EINVAL return when nothing
 * matches are elided in this listing.
 */
1305 static int ni_populate_voltage_value(struct radeon_device *rdev,
1306 struct atom_voltage_table *table,
1308 NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1312 for (i = 0; i < table->count; i++) {
1313 if (value <= table->entries[i].value) {
1314 voltage->index = (u8)i;
1315 voltage->value = cpu_to_be16(table->entries[i].value);
/* No entry can supply the requested voltage. */
1320 if (i >= table->count)
/*
 * ni_populate_mvdd_value() - choose the MVDD (memory voltage) level for a
 * given memory clock. Without MVDD control the high level is always used;
 * otherwise the clock is compared against the split frequency to pick the
 * low or high MVDD index/value.
 */
1326 static void ni_populate_mvdd_value(struct radeon_device *rdev,
1328 NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1330 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1331 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
/* No MVDD control: the rail is fixed at the high value. */
1333 if (!pi->mvdd_control) {
1334 voltage->index = eg_pi->mvdd_high_index;
1335 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
/* Below the split frequency the low MVDD level suffices. */
1339 if (mclk <= pi->mvdd_split_frequency) {
1340 voltage->index = eg_pi->mvdd_low_index;
1341 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1343 voltage->index = eg_pi->mvdd_high_index;
1344 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
/*
 * ni_get_std_voltage_value() - resolve the standardized VDDC for an SMC
 * voltage entry. When a CAC leakage table is present and the index is in
 * range, the leakage-corrected vddc is used; otherwise fall back to the raw
 * (big-endian) value stored in the entry.
 */
1348 static int ni_get_std_voltage_value(struct radeon_device *rdev,
1349 NISLANDS_SMC_VOLTAGE_VALUE *voltage,
1352 if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
1353 ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
1354 *std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
1356 *std_voltage = be16_to_cpu(voltage->value);
/*
 * ni_populate_std_voltage_value() - store an index/value pair into an SMC
 * voltage entry, converting the value to the SMC's big-endian layout.
 */
1361 static void ni_populate_std_voltage_value(struct radeon_device *rdev,
1362 u16 value, u8 index,
1363 NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1365 voltage->index = index;
1366 voltage->value = cpu_to_be16(value);
/*
 * ni_get_smc_power_scaling_factor() - derive the SMC power scaling factor
 * from the CAC TID count and the reference-clock period.
 * NOTE(review): a zero-xclk guard appears to be elided in this listing;
 * xclk_period ends up in 100us-scale units via the two divisions — confirm
 * units against the SMC firmware interface.
 */
1369 static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
1372 u32 xclk = radeon_get_xclk(rdev);
1373 u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;
/* Period in ns, then coarsened by 10^4. */
1375 xclk_period = (1000000000UL / xclk);
1376 xclk_period /= 10000UL;
1378 return tmp * xclk_period;
1381 static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
1383 return (power_in_watts * scaling_factor) << 2;
/*
 * ni_calculate_power_boost_limit() - compute the DPM2 power-boost limit.
 * Only meaningful when power containment and the boost limit are enabled and
 * the state has at least 3 performance levels. The limit scales the near-TDP
 * limit by (Vmed^2 / Vhigh^2) * 0.9, using the standardized VDDC of the
 * second-highest and highest performance levels; 64-bit math avoids overflow
 * in the squared-voltage products.
 * NOTE(review): early-return/error paths between the shown lines are elided
 * in this listing.
 */
1386 static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
1387 struct radeon_ps *radeon_state,
1390 struct ni_ps *state = ni_get_ps(radeon_state);
1391 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1392 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1393 u32 power_boost_limit = 0;
1396 if (ni_pi->enable_power_containment &&
1397 ni_pi->use_power_boost_limit) {
1398 NISLANDS_SMC_VOLTAGE_VALUE vddc;
/* Need low/med/high levels for the boost calculation to make sense. */
1403 if (state->performance_level_count < 3)
/* Standardized VDDC of the second-highest level ("med"). */
1406 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1407 state->performance_levels[state->performance_level_count - 2].vddc,
1412 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
/* Standardized VDDC of the highest level. */
1416 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1417 state->performance_levels[state->performance_level_count - 1].vddc,
1422 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
/* boost = near_tdp * (Vmed^2 / Vhigh^2) * 90% -- done in u64. */
1426 n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
1427 d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
1428 tmp = div64_u64(n, d);
1432 power_boost_limit = (u32)tmp;
1435 return power_boost_limit;
/*
 * ni_calculate_adjusted_tdp_limits() - apply the user TDP adjustment.
 * Scales the board TDP limit by +/- tdp_adjustment percent (direction chosen
 * by adjust_polarity) and shifts the near-TDP limit by the same absolute
 * delta. Adjustments beyond tdp_od_limit are rejected (error return elided
 * in this listing).
 */
1438 static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
1439 bool adjust_polarity,
1442 u32 *near_tdp_limit)
1444 if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
/* Positive polarity raises the limits; negative lowers them. */
1447 if (adjust_polarity) {
1448 *tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1449 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
1451 *tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1452 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
/*
 * ni_populate_smc_tdp_limits() - compute and upload DPM2 TDP parameters.
 * When power containment is on: derive adjusted TDP/near-TDP limits, the
 * safe limit (percentage of near-TDP) and the boost limit, scale each into
 * SMC power units, and copy the four consecutive u32 fields to the SMC in
 * one transfer.
 */
1458 static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
1459 struct radeon_ps *radeon_state)
1461 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1462 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1464 if (ni_pi->enable_power_containment) {
1465 NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
1466 u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
1469 u32 power_boost_limit;
/* A zero scaling factor would make every scaled limit zero. */
1472 if (scaling_factor == 0)
1475 memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1477 ret = ni_calculate_adjusted_tdp_limits(rdev,
1479 rdev->pm.dpm.tdp_adjustment,
1485 power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
/* All limits are stored big-endian in SMC power units. */
1488 smc_table->dpm2Params.TDPLimit =
1489 cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
1490 smc_table->dpm2Params.NearTDPLimit =
1491 cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
1492 smc_table->dpm2Params.SafePowerLimit =
1493 cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
1495 smc_table->dpm2Params.PowerBoostLimit =
1496 cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));
/* Upload TDPLimit..PowerBoostLimit (4 u32s) in a single SMC copy. */
1498 ret = rv770_copy_bytes_to_smc(rdev,
1499 (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
1500 offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
1501 (u8 *)(&smc_table->dpm2Params.TDPLimit),
1502 sizeof(u32) * 4, pi->sram_end);
/*
 * ni_copy_and_switch_arb_sets() - copy MC arbiter timing between register
 * sets and switch the active set.
 * Reads the DRAM timing pair and the per-state burst time from the source
 * set (F0..F3), writes them into the destination set, then enables clock
 * gating in MC_CG_CONFIG and requests the arbiter switch via MC_ARB_CG.
 * NOTE(review): the break statements between cases and the default
 * (-EINVAL) paths are elided in this listing.
 */
1510 int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
1511 u32 arb_freq_src, u32 arb_freq_dest)
1513 u32 mc_arb_dram_timing;
1514 u32 mc_arb_dram_timing2;
/* Capture timings + burst time from the source set. */
1518 switch (arb_freq_src) {
1519 case MC_CG_ARB_FREQ_F0:
1520 mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1521 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1522 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
1524 case MC_CG_ARB_FREQ_F1:
1525 mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING_1);
1526 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
1527 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
1529 case MC_CG_ARB_FREQ_F2:
1530 mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING_2);
1531 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
1532 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
1534 case MC_CG_ARB_FREQ_F3:
1535 mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING_3);
1536 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
1537 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
/* Replay them into the destination set. */
1543 switch (arb_freq_dest) {
1544 case MC_CG_ARB_FREQ_F0:
1545 WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
1546 WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
1547 WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
1549 case MC_CG_ARB_FREQ_F1:
1550 WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
1551 WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
1552 WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
1554 case MC_CG_ARB_FREQ_F2:
1555 WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
1556 WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
1557 WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
1559 case MC_CG_ARB_FREQ_F3:
1560 WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
1561 WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
1562 WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
/* Enable MC clock-gating bits, then request the switch. */
1568 mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
1569 WREG32(MC_CG_CONFIG, mc_cg_config);
1570 WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);
/*
 * ni_init_arb_table_index() - record the initial arbiter set in SMC SRAM.
 * Read-modify-write of the first dword of the SMC arb table: the top byte
 * is set to MC_CG_ARB_FREQ_F1 (the mask of the old top byte is elided in
 * this listing).
 */
1575 static int ni_init_arb_table_index(struct radeon_device *rdev)
1577 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1578 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1582 ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1583 &tmp, pi->sram_end);
/* Arbiter set index lives in the most-significant byte. */
1588 tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
1590 return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
/*
 * ni_initial_switch_from_arb_f0_to_f1() - one-shot move of the MC arbiter
 * from the boot set (F0) to F1, copying F0's timings along the way.
 */
1594 static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
1596 return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
/*
 * ni_force_switch_to_arb_f0() - return the MC arbiter to set F0.
 * Reads the currently active set from the top byte of the SMC arb table;
 * if it is already F0 this is a no-op (return elided in this listing),
 * otherwise the current set's timings are copied back into F0 and F0 is
 * activated.
 */
1599 static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
1601 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1602 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1606 ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1607 &tmp, pi->sram_end);
/* Active arbiter set index is stored in the top byte. */
1611 tmp = (tmp >> 24) & 0xff;
1613 if (tmp == MC_CG_ARB_FREQ_F0)
1616 return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
/*
 * ni_populate_memory_timing_parameters() - build one SMC arb register set
 * for a performance level. Computes the refresh rate from the level's sclk,
 * has the VBIOS program DRAM timings for the level's clocks, then snapshots
 * the resulting MC_ARB_DRAM_TIMING registers (big-endian) into *arb_regs.
 */
1619 static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
1620 struct rv7xx_pl *pl,
1621 SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
1626 arb_regs->mc_arb_rfsh_rate =
1627 (u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);
/* Let the VBIOS compute/program timings for this sclk/mclk pair. */
1630 radeon_atom_set_engine_dram_timings(rdev,
/* Capture what the VBIOS programmed so the SMC can replay it. */
1634 dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1635 dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1637 arb_regs->mc_arb_dram_timing = cpu_to_be32(dram_timing);
1638 arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
/*
 * ni_do_program_memory_timing_parameters() - upload arb register sets for
 * every performance level of a state. Each level's set is written into the
 * SMC arb table at slot (first_arb_set + i), so different states can occupy
 * disjoint slot ranges.
 */
1643 static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
1644 struct radeon_ps *radeon_state,
1645 unsigned int first_arb_set)
1647 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1648 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1649 struct ni_ps *state = ni_get_ps(radeon_state);
1650 SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
1653 for (i = 0; i < state->performance_level_count; i++) {
1654 ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
/* Copy this level's set into its slot in the SMC arb table. */
1658 ret = rv770_copy_bytes_to_smc(rdev,
1659 (u16)(ni_pi->arb_table_start +
1660 offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
1661 sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
1663 (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
/*
 * ni_program_memory_timing_parameters() - upload arb sets for a new driver
 * state, starting at the driver-state slot range.
 */
1671 static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
1672 struct radeon_ps *radeon_new_state)
1674 return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
1675 NISLANDS_DRIVER_STATE_ARB_INDEX);
/*
 * ni_populate_initial_mvdd_value() - initial state always runs at the high
 * MVDD level (index + big-endian value).
 */
1678 static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
1679 struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1681 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1683 voltage->index = eg_pi->mvdd_high_index;
1684 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
/*
 * ni_populate_smc_initial_state() - build the SMC "initial" state entry
 * from the boot state. Level 0 is filled with the cached boot clock
 * registers, the boot state's clocks and voltages, PCIe gen, GDDR5
 * strobe/EDC flags, and neutral DPM2 parameters. All multi-byte fields are
 * stored big-endian for the SMC.
 */
1687 static int ni_populate_smc_initial_state(struct radeon_device *rdev,
1688 struct radeon_ps *radeon_initial_state,
1689 NISLANDS_SMC_STATETABLE *table)
1691 struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
1692 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1693 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1694 struct ni_power_info *ni_pi = ni_get_pi(rdev);
/* Memory PLL state comes straight from the cached boot registers. */
1698 table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
1699 cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
1700 table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
1701 cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
1702 table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
1703 cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
1704 table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
1705 cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
1706 table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
1707 cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
1708 table->initialState.levels[0].mclk.vDLL_CNTL =
1709 cpu_to_be32(ni_pi->clock_registers.dll_cntl);
1710 table->initialState.levels[0].mclk.vMPLL_SS =
1711 cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
1712 table->initialState.levels[0].mclk.vMPLL_SS2 =
1713 cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
1714 table->initialState.levels[0].mclk.mclk_value =
1715 cpu_to_be32(initial_state->performance_levels[0].mclk);
/* Engine PLL state likewise mirrors the boot-time SPLL registers. */
1717 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1718 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
1719 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1720 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
1721 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1722 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
1723 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
1724 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
1725 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1726 cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
1727 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1728 cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
1729 table->initialState.levels[0].sclk.sclk_value =
1730 cpu_to_be32(initial_state->performance_levels[0].sclk);
1731 table->initialState.levels[0].arbRefreshState =
1732 NISLANDS_INITIAL_STATE_ARB_INDEX;
1734 table->initialState.levels[0].ACIndex = 0;
/* Resolve boot VDDC to table entry + standardized value. */
1736 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1737 initial_state->performance_levels[0].vddc,
1738 &table->initialState.levels[0].vddc);
1742 ret = ni_get_std_voltage_value(rdev,
1743 &table->initialState.levels[0].vddc,
1746 ni_populate_std_voltage_value(rdev, std_vddc,
1747 table->initialState.levels[0].vddc.index,
1748 &table->initialState.levels[0].std_vddc);
1751 if (eg_pi->vddci_control)
1752 ni_populate_voltage_value(rdev,
1753 &eg_pi->vddci_voltage_table,
1754 initial_state->performance_levels[0].vddci,
1755 &table->initialState.levels[0].vddci);
1757 ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);
/* aT: CG_R full-range, CG_L zero. */
1759 reg = CG_R(0xffff) | CG_L(0);
1760 table->initialState.levels[0].aT = cpu_to_be32(reg);
1762 table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1764 if (pi->boot_in_gen2)
1765 table->initialState.levels[0].gen2PCIE = 1;
1767 table->initialState.levels[0].gen2PCIE = 0;
/* GDDR5 needs strobe mode and, above the EDC threshold, EDC RD/WR. */
1769 if (pi->mem_gddr5) {
1770 table->initialState.levels[0].strobeMode =
1771 cypress_get_strobe_mode_settings(rdev,
1772 initial_state->performance_levels[0].mclk);
1774 if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
1775 table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
1777 table->initialState.levels[0].mcFlags = 0;
1780 table->initialState.levelCount = 1;
1782 table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
/* Neutral DPM2 parameters: no power-state throttling on the boot level. */
1784 table->initialState.levels[0].dpm2.MaxPS = 0;
1785 table->initialState.levels[0].dpm2.NearTDPDec = 0;
1786 table->initialState.levels[0].dpm2.AboveSafeInc = 0;
1787 table->initialState.levels[0].dpm2.BelowSafeInc = 0;
1789 reg = MIN_POWER_MASK | MAX_POWER_MASK;
1790 table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1792 reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1793 table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
/*
 * ni_populate_smc_acpi_state() - build the SMC ACPI (lowest-power) state.
 * Starts from a copy of the initial state, then: selects the ACPI VDDC (or
 * the table minimum), powers down and bypasses both PLLs, resets/bypasses
 * the memory DLLs, routes the engine clock mux to source 4, and zeroes the
 * sclk/mclk values so the SMC treats them as "off".
 */
1798 static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
1799 NISLANDS_SMC_STATETABLE *table)
1801 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1802 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1803 struct ni_power_info *ni_pi = ni_get_pi(rdev);
/* Work on local copies of the cached boot-time PLL registers. */
1804 u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
1805 u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
1806 u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
1807 u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
1808 u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
1809 u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
1810 u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
1811 u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
1812 u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
1813 u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
1817 table->ACPIState = table->initialState;
1819 table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
/* Prefer the dedicated ACPI VDDC when the platform provides one... */
1821 if (pi->acpi_vddc) {
1822 ret = ni_populate_voltage_value(rdev,
1823 &eg_pi->vddc_voltage_table,
1824 pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
1828 ret = ni_get_std_voltage_value(rdev,
1829 &table->ACPIState.levels[0].vddc, &std_vddc);
1831 ni_populate_std_voltage_value(rdev, std_vddc,
1832 table->ACPIState.levels[0].vddc.index,
1833 &table->ACPIState.levels[0].std_vddc);
1836 if (pi->pcie_gen2) {
1837 if (pi->acpi_pcie_gen2)
1838 table->ACPIState.levels[0].gen2PCIE = 1;
1840 table->ACPIState.levels[0].gen2PCIE = 0;
1842 table->ACPIState.levels[0].gen2PCIE = 0;
/* ...otherwise fall back to the smallest VDDC in the table. */
1845 ret = ni_populate_voltage_value(rdev,
1846 &eg_pi->vddc_voltage_table,
1847 pi->min_vddc_in_table,
1848 &table->ACPIState.levels[0].vddc);
1852 ret = ni_get_std_voltage_value(rdev,
1853 &table->ACPIState.levels[0].vddc,
1856 ni_populate_std_voltage_value(rdev, std_vddc,
1857 table->ACPIState.levels[0].vddc.index,
1858 &table->ACPIState.levels[0].std_vddc);
1860 table->ACPIState.levels[0].gen2PCIE = 0;
1863 if (eg_pi->acpi_vddci) {
1864 if (eg_pi->vddci_control)
1865 ni_populate_voltage_value(rdev,
1866 &eg_pi->vddci_voltage_table,
1868 &table->ACPIState.levels[0].vddci);
/* Power down both MPLL halves and hold them in reset/bypass. */
1872 mpll_ad_func_cntl &= ~PDNB;
1874 mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
1877 mpll_dq_func_cntl &= ~PDNB;
1878 mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
/* Reset all memory DLL channels... */
1881 mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
/* ...and drop their power-down-bar bits (channels powered down). */
1890 mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
/* Bypass the DLLs entirely while in ACPI state. */
1899 dll_cntl |= (MRDCKA0_BYPASS |
/* Engine clock mux source 4 while the SPLL output is unused. */
1908 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
1909 spll_func_cntl_2 |= SCLK_MUX_SEL(4);
1911 table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
1912 table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
1913 table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
1914 table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
1915 table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
1916 table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);
/* Zero clock values signal "clock off" to the SMC. */
1918 table->ACPIState.levels[0].mclk.mclk_value = 0;
1920 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
1921 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
1922 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
1923 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);
1925 table->ACPIState.levels[0].sclk.sclk_value = 0;
1927 ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1929 if (eg_pi->dynamic_ac_timing)
1930 table->ACPIState.levels[0].ACIndex = 1;
/* Neutral DPM2 parameters, same as the initial state. */
1932 table->ACPIState.levels[0].dpm2.MaxPS = 0;
1933 table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
1934 table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
1935 table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;
1937 reg = MIN_POWER_MASK | MAX_POWER_MASK;
1938 table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1940 reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1941 table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
/*
 * ni_init_smc_table() - assemble and upload the full SMC state table.
 * Populates voltage tables, thermal-protection type and platform flags,
 * then the initial state (from the boot state) and the ACPI state; the
 * driver and ULV states start as copies of the initial state. Also uploads
 * the boot state's memory-timing arb sets before copying the whole table
 * into SMC SRAM.
 */
1946 static int ni_init_smc_table(struct radeon_device *rdev)
1948 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1949 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1951 struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
1952 NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;
1954 memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1956 ni_populate_smc_voltage_tables(rdev, table);
/* Thermal-protection source depends on the sensor type. */
1958 switch (rdev->pm.int_thermal_type) {
1959 case THERMAL_TYPE_NI:
1960 case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
1961 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1963 case THERMAL_TYPE_NONE:
1964 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1967 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
/* Mirror platform capability bits into SMC system flags. */
1971 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
1972 table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1974 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
1975 table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
1977 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1978 table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1981 table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1983 ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
1987 ret = ni_populate_smc_acpi_state(rdev, table);
/* Driver and ULV states start out identical to the initial state. */
1991 table->driverState = table->initialState;
1993 table->ULVState = table->initialState;
1995 ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
1996 NISLANDS_INITIAL_STATE_ARB_INDEX);
2000 return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
2001 sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
/*
 * ni_calculate_sclk_params() - compute SPLL register values for an engine
 * clock. Gets ref/post dividers from the VBIOS, derives the fractional
 * feedback divider, programs the local copies of the SPLL control registers
 * and, when engine spread spectrum is available, the CLK_S/CLK_V SS fields.
 * Results are returned CPU-endian in *sclk; callers byte-swap for the SMC.
 */
2004 static int ni_calculate_sclk_params(struct radeon_device *rdev,
2006 NISLANDS_SMC_SCLK_VALUE *sclk)
2008 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2009 struct ni_power_info *ni_pi = ni_get_pi(rdev);
2010 struct atom_clock_dividers dividers;
2011 u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
2012 u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
2013 u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
2014 u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
2015 u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
2016 u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
2018 u32 reference_clock = rdev->clock.spll.reference_freq;
2019 u32 reference_divider;
2023 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2024 engine_clock, false, ÷rs);
2028 reference_divider = 1 + dividers.ref_div;
/*
 * Fractional feedback divider in 64-bit to avoid overflow.
 * NOTE(review): 16834 looks like a transposition of 16384 (2^14, the
 * fbdiv fractional scale) — verify against the SPLL fbdiv spec before
 * changing, since this constant ships in the original source.
 */
2031 tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834;
2032 do_div(tmp, reference_clock);
2035 spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
2036 spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
2037 spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
/* Engine clock mux source 2 selects the SPLL output. */
2039 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
2040 spll_func_cntl_2 |= SCLK_MUX_SEL(2);
2042 spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
2043 spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
2044 spll_func_cntl_3 |= SPLL_DITHEN;
/* Optional engine-clock spread spectrum. */
2047 struct radeon_atom_ss ss;
2048 u32 vco_freq = engine_clock * dividers.post_div;
2050 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2051 ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
2052 u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
2053 u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
2055 cg_spll_spread_spectrum &= ~CLK_S_MASK;
2056 cg_spll_spread_spectrum |= CLK_S(clk_s);
2057 cg_spll_spread_spectrum |= SSEN;
2059 cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
2060 cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
2064 sclk->sclk_value = engine_clock;
2065 sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
2066 sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
2067 sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
2068 sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
2069 sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
2070 sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
/*
 * ni_populate_sclk_value() - compute SPLL parameters for an engine clock and
 * store them byte-swapped (big-endian) for the SMC. Thin wrapper around
 * ni_calculate_sclk_params() using a CPU-endian temporary.
 */
2075 static int ni_populate_sclk_value(struct radeon_device *rdev,
2077 NISLANDS_SMC_SCLK_VALUE *sclk)
2079 NISLANDS_SMC_SCLK_VALUE sclk_tmp;
2082 ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
2084 sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
2085 sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
2086 sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
2087 sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
2088 sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
2089 sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
2090 sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
2096 static int ni_init_smc_spll_table(struct radeon_device *rdev)
2098 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2099 struct ni_power_info *ni_pi = ni_get_pi(rdev);
2100 SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
2101 NISLANDS_SMC_SCLK_VALUE sclk_params;
2110 if (ni_pi->spll_table_start == 0)
2113 spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
2114 if (spll_table == NULL)
2117 for (i = 0; i < 256; i++) {
2118 ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
2122 p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
2123 fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
2124 clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
2125 clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
2127 fb_div &= ~0x00001FFF;
2131 if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
2134 if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2137 if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2140 if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
2146 tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
2147 ((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
2148 spll_table->freq[i] = cpu_to_be32(tmp);
2150 tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
2151 ((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
2152 spll_table->ss[i] = cpu_to_be32(tmp);
2158 ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
2159 sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
/*
 * ni_populate_mclk_value() - compute MPLL register values for a memory
 * clock and store them big-endian into *mclk. Gets dividers from the VBIOS,
 * programs the AD (and, for GDDR5, DQ) MPLL halves, optional memory spread
 * spectrum, and the DLL speed field.
 */
2166 static int ni_populate_mclk_value(struct radeon_device *rdev,
2169 NISLANDS_SMC_MCLK_VALUE *mclk,
2173 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2174 struct ni_power_info *ni_pi = ni_get_pi(rdev);
/* Start from the cached boot-time MPLL register values. */
2175 u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
2176 u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
2177 u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
2178 u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
2179 u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
2180 u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
2181 u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
2182 u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
2183 struct atom_clock_dividers dividers;
2189 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
2190 memory_clock, strobe_mode, ÷rs);
/* MC_SEQ_MISC7 bit forces post_div=1 (board strap; exact meaning not
 * derivable from this listing). */
2195 mc_seq_misc7 = RREG32(MC_SEQ_MISC7);
2197 if (mc_seq_misc7 & 0x8000000)
2198 dividers.post_div = 1;
2201 ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);
/* Program the address/command (AD) half of the MPLL. */
2203 mpll_ad_func_cntl &= ~(CLKR_MASK |
2204 YCLK_POST_DIV_MASK |
2208 mpll_ad_func_cntl |= CLKR(dividers.ref_div);
2209 mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2210 mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
2211 mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2212 mpll_ad_func_cntl |= IBIAS(ibias);
2214 if (dividers.vco_mode)
2215 mpll_ad_func_cntl_2 |= VCO_MODE;
2217 mpll_ad_func_cntl_2 &= ~VCO_MODE;
/* GDDR5 also drives the data (DQ) half; otherwise it stays powered. */
2219 if (pi->mem_gddr5) {
2220 mpll_dq_func_cntl &= ~(CLKR_MASK |
2221 YCLK_POST_DIV_MASK |
2225 mpll_dq_func_cntl |= CLKR(dividers.ref_div);
2226 mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2227 mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
2228 mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2229 mpll_dq_func_cntl |= IBIAS(ibias);
2232 mpll_dq_func_cntl &= ~PDNB;
2234 mpll_dq_func_cntl |= PDNB;
2236 if (dividers.vco_mode)
2237 mpll_dq_func_cntl_2 |= VCO_MODE;
2239 mpll_dq_func_cntl_2 &= ~VCO_MODE;
/* Optional memory-clock spread spectrum (CLKS/CLKV fields). */
2243 struct radeon_atom_ss ss;
2244 u32 vco_freq = memory_clock * dividers.post_div;
2246 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2247 ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
2248 u32 reference_clock = rdev->clock.mpll.reference_freq;
2249 u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
2250 u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
2251 u32 clk_v = ss.percentage *
2252 (0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);
2254 mpll_ss1 &= ~CLKV_MASK;
2255 mpll_ss1 |= CLKV(clk_v);
2257 mpll_ss2 &= ~CLKS_MASK;
2258 mpll_ss2 |= CLKS(clk_s);
/* DLL speed depends on memory type and clock. */
2262 dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
2265 mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
2266 mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
/* PDNB bits: power the per-channel DLLs up (or down, in the else arm). */
2268 mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
2277 mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
/* Store everything big-endian for the SMC. */
2287 mclk->mclk_value = cpu_to_be32(memory_clock);
2288 mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
2289 mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
2290 mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
2291 mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
2292 mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
2293 mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
2294 mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
2295 mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);
/*
 * Fill the per-level bSP (switch period) fields of an SMC software
 * state: every level but the last gets the dynamic value pi->dsp, the
 * highest level gets pi->psp. Values are byte-swapped for the SMC.
 */
2300 static void ni_populate_smc_sp(struct radeon_device *rdev,
2301 			       struct radeon_ps *radeon_state,
2302 			       NISLANDS_SMC_SWSTATE *smc_state)
2304 	struct ni_ps *ps = ni_get_ps(radeon_state);
2305 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2308 	for (i = 0; i < ps->performance_level_count - 1; i++)
2309 		smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
2311 	smc_state->levels[ps->performance_level_count - 1].bSP =
2312 		cpu_to_be32(pi->psp);
/*
 * Translate one driver performance level (struct rv7xx_pl) into the SMC
 * hardware-level representation: PCIe gen2 flag, sclk/mclk register
 * values, memory self-refresh (stutter) and EDC flags, strobe/DLL mode
 * and the VDDC/VDDCI/MVDD voltage entries.
 *
 * NOTE(review): error-return checks between the calls appear to be
 * elided from this chunk.
 */
2315 static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
2316 					 struct rv7xx_pl *pl,
2317 					 NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
2319 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2320 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2321 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2325 	u32 tmp = RREG32(DC_STUTTER_CNTL);
2327 	level->gen2PCIE = pi->pcie_gen2 ?
2328 		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
2330 	ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
	/* Memory stutter mode only below the threshold, with UVD idle and
	 * both display controllers reporting stutter capability. */
2335 	if (pi->mclk_stutter_mode_threshold &&
2336 	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
2337 	    !eg_pi->uvd_enabled &&
2338 	    (tmp & DC_STUTTER_ENABLE_A) &&
2339 	    (tmp & DC_STUTTER_ENABLE_B))
2340 		level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;
2342 	if (pi->mem_gddr5) {
	/* Error-detection-and-correction read/write above the thresholds. */
2343 		if (pl->mclk > pi->mclk_edc_enable_threshold)
2344 			level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
2345 		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
2346 			level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;
2348 		level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);
	/* DLL on/off decision comes from MC_SEQ_MISC5/6 depending on the
	 * clock-frequency ratio vs the fuse value in MC_SEQ_MISC7[19:16]. */
2350 		if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
2351 			if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
2352 			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
2353 				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
2355 				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
2357 			dll_state_on = false;
2358 		if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
2359 			level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
2362 		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
2364 					     (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
	/* Non-GDDR5 path: strobe and DLL both forced on (1, 1). */
2367 		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1);
2372 	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
2373 					pl->vddc, &level->vddc);
2377 	ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
2381 	ni_populate_std_voltage_value(rdev, std_vddc,
2382 				      level->vddc.index, &level->std_vddc);
2384 	if (eg_pi->vddci_control) {
2385 		ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
2386 						pl->vddci, &level->vddci);
2391 	ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
/*
 * Compute the per-level aT (transition-time) values for the SMC state.
 * Each adjacent pair of levels gets CG_R (down-threshold) on the lower
 * level and CG_L (up-threshold) on the upper level, derived from
 * r600_calculate_at() and scaled by the bsp/pbsp switch periods.
 *
 * NOTE(review): the UVD and non-UVD r600_calculate_at() argument lists
 * appear truncated in this chunk.
 */
2396 static int ni_populate_smc_t(struct radeon_device *rdev,
2397 			     struct radeon_ps *radeon_state,
2398 			     NISLANDS_SMC_SWSTATE *smc_state)
2400 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2401 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2402 	struct ni_ps *state = ni_get_ps(radeon_state);
	/* Only up to 8 levels fit the a_t table handling below. */
2408 	if (state->performance_level_count >= 9)
	/* Single-level state: never transition down (CG_R saturated). */
2411 	if (state->performance_level_count < 2) {
2412 		a_t = CG_R(0xffff) | CG_L(0);
2413 		smc_state->levels[0].aT = cpu_to_be32(a_t);
2417 	smc_state->levels[0].aT = cpu_to_be32(0);
2419 	for (i = 0; i <= state->performance_level_count - 2; i++) {
	/* UVD states use a slower ramp schedule (2 or 8 units per step). */
2420 		if (eg_pi->uvd_enabled)
2421 			ret = r600_calculate_at(
2422 				1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
2424 				state->performance_levels[i + 1].sclk,
2425 				state->performance_levels[i].sclk,
2429 			ret = r600_calculate_at(
2432 				state->performance_levels[i + 1].sclk,
2433 				state->performance_levels[i].sclk,
2438 		t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
2439 		t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
	/* Merge the new CG_R into the already-stored (big-endian) aT. */
2442 		a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
2443 		a_t |= CG_R(t_l * pi->bsp / 20000);
2444 		smc_state->levels[i].aT = cpu_to_be32(a_t);
2446 		high_bsp = (i == state->performance_level_count - 2) ?
2449 		a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
2450 		smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
/*
 * Fill the DPM2 power-containment parameters (MaxPS pulse skipping,
 * near-TDP decrement, above/below-safe increments, power-boost flag)
 * for every level of an SMC software state, and write the computed
 * power-boost limit into SMC SRAM.
 *
 * NOTE(review): early-return statements after the guard conditions are
 * not visible in this chunk.
 */
2456 static int ni_populate_power_containment_values(struct radeon_device *rdev,
2457 						struct radeon_ps *radeon_state,
2458 						NISLANDS_SMC_SWSTATE *smc_state)
2460 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2461 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2462 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2463 	struct ni_ps *state = ni_get_ps(radeon_state);
2470 	u32 power_boost_limit;
	/* Nothing to do when power containment is disabled. */
2473 	if (ni_pi->enable_power_containment == false)
2476 	if (state->performance_level_count == 0)
	/* SMC state must already carry one entry per driver level. */
2479 	if (smc_state->levelCount != state->performance_level_count)
2482 	ret = ni_calculate_adjusted_tdp_limits(rdev,
2484 					       rdev->pm.dpm.tdp_adjustment,
2490 	power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);
	/* Push the (SMC-scaled) boost limit into the DPM2 parameter table. */
2492 	ret = rv770_write_smc_sram_dword(rdev,
2493 					 pi->state_table_start +
2494 					 offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
2495 					 offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
2496 					 ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
2499 		power_boost_limit = 0;
	/* Level 0 never throttles: all DPM2 fields zeroed. */
2501 	smc_state->levels[0].dpm2.MaxPS = 0;
2502 	smc_state->levels[0].dpm2.NearTDPDec = 0;
2503 	smc_state->levels[0].dpm2.AboveSafeInc = 0;
2504 	smc_state->levels[0].dpm2.BelowSafeInc = 0;
2505 	smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;
2507 	for (i = 1; i < state->performance_level_count; i++) {
2508 		prev_sclk = state->performance_levels[i-1].sclk;
2509 		max_sclk = state->performance_levels[i].sclk;
	/* Middle levels use the _M percentage, the top level uses _H. */
2510 		max_ps_percent = (i != (state->performance_level_count - 1)) ?
2511 			NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;
2513 		if (max_sclk < prev_sclk)
2516 		if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
2517 			min_sclk = max_sclk;
2519 			min_sclk = prev_sclk;
2521 			min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;
	/* Never throttle below the lowest level's sclk. */
2523 		if (min_sclk < state->performance_levels[0].sclk)
2524 			min_sclk = state->performance_levels[0].sclk;
2529 		smc_state->levels[i].dpm2.MaxPS =
2530 			(u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
2531 		smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
2532 		smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
2533 		smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
	/* Power boost is allowed on every level except the highest. */
2534 		smc_state->levels[i].stateFlags |=
2535 			((i != (state->performance_level_count - 1)) && power_boost_limit) ?
2536 			PPSMC_STATEFLAG_POWERBOOST : 0;
/*
 * Fill the per-level SQ power-throttle registers for an SMC state.
 * SQ ramping is only enabled when every constant fits its register
 * field; otherwise the level is programmed with all-ones (disabled)
 * masks. Values are byte-swapped for the SMC.
 */
2542 static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
2543 					 struct radeon_ps *radeon_state,
2544 					 NISLANDS_SMC_SWSTATE *smc_state)
2546 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2547 	struct ni_ps *state = ni_get_ps(radeon_state);
2548 	u32 sq_power_throttle;
2549 	u32 sq_power_throttle2;
2550 	bool enable_sq_ramping = ni_pi->enable_sq_ramping;
2553 	if (state->performance_level_count == 0)
2556 	if (smc_state->levelCount != state->performance_level_count)
2559 	if (rdev->pm.dpm.sq_ramping_threshold == 0)
	/* Sanity-check each constant against its register field width;
	 * any overflow disables SQ ramping entirely. */
2562 	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
2563 		enable_sq_ramping = false;
2565 	if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
2566 		enable_sq_ramping = false;
2568 	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
2569 		enable_sq_ramping = false;
2571 	if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
2572 		enable_sq_ramping = false;
2574 	if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO <= (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
2575 		enable_sq_ramping = false;
2577 	for (i = 0; i < state->performance_level_count; i++) {
2578 		sq_power_throttle = 0;
2579 		sq_power_throttle2 = 0;
	/* Ramping applies only at/above the configured sclk threshold. */
2581 		if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
2582 		    enable_sq_ramping) {
2583 			sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
2584 			sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
2585 			sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
2586 			sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
2587 			sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
	/* Disabled: saturate all fields so the hardware never throttles. */
2589 			sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
2590 			sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
2593 		smc_state->levels[i].SQPowerThrottle = cpu_to_be32(sq_power_throttle);
2594 		smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
/*
 * Toggle SMC TDP clamping (power containment). Clamping is activated
 * only for non-UVD states; on deactivation (or activation failure)
 * pc_enabled is cleared. Communicates with the SMC via
 * rv770_send_msg_to_smc().
 */
2600 static int ni_enable_power_containment(struct radeon_device *rdev,
2601 				       struct radeon_ps *radeon_new_state,
2604 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2605 	PPSMC_Result smc_result;
2608 	if (ni_pi->enable_power_containment) {
	/* UVD (video decode) states are exempt from TDP clamping. */
2610 		if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
2611 			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
2612 			if (smc_result != PPSMC_Result_OK) {
2614 				ni_pi->pc_enabled = false;
2616 				ni_pi->pc_enabled = true;
2620 		smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
2621 		if (smc_result != PPSMC_Result_OK)
2623 		ni_pi->pc_enabled = false;
/*
 * Build a complete SMC software state from a driver power state: one
 * SMC hardware level per driver performance level, display watermarks,
 * MC register-table (ACIndex) slots, switch periods, power containment
 * and SQ-ramping values, and finally the transition times (smc_t).
 * Failures in the optional DPM2 features are non-fatal and simply
 * disable that feature for the session.
 */
2630 static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
2631 					 struct radeon_ps *radeon_state,
2632 					 NISLANDS_SMC_SWSTATE *smc_state)
2634 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2635 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2636 	struct ni_ps *state = ni_get_ps(radeon_state);
	/* Watermark threshold = top level sclk (the *100/100 is a no-op). */
2638 	u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100;
2640 	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
2641 		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
2643 	smc_state->levelCount = 0;
2645 	if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
2648 	for (i = 0; i < state->performance_level_count; i++) {
2649 		ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
2650 						    &smc_state->levels[i]);
2651 		smc_state->levels[i].arbRefreshState =
2652 			(u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);
	/* Watermark selection differs with power containment enabled:
	 * sclk-threshold based rather than level-index based. */
2657 		if (ni_pi->enable_power_containment)
2658 			smc_state->levels[i].displayWatermark =
2659 				(state->performance_levels[i].sclk < threshold) ?
2660 				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2662 			smc_state->levels[i].displayWatermark = (i < 2) ?
2663 				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2665 		if (eg_pi->dynamic_ac_timing)
2666 			smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
2668 			smc_state->levels[i].ACIndex = 0;
2670 		smc_state->levelCount++;
2673 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
2674 				      cpu_to_be32(threshold / 512));
2676 	ni_populate_smc_sp(rdev, radeon_state, smc_state);
	/* DPM2 extras are best-effort: failure disables the feature. */
2678 	ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
2680 		ni_pi->enable_power_containment = false;
2682 	ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
2684 		ni_pi->enable_sq_ramping = false;
2686 	return ni_populate_smc_t(rdev, radeon_state, smc_state);
/*
 * Convert the new power state to SMC form in a temporary kernel buffer
 * and copy it into the SMC state table (driverState slot) in SRAM.
 * The buffer is sized for the maximum number of performance levels
 * (SWSTATE header plus the extra HW_PERFORMANCE_LEVEL entries beyond
 * the first, which is embedded in the struct).
 */
2689 static int ni_upload_sw_state(struct radeon_device *rdev,
2690 			      struct radeon_ps *radeon_new_state)
2692 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2693 	u16 address = pi->state_table_start +
2694 		offsetof(NISLANDS_SMC_STATETABLE, driverState);
2695 	u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
2696 		((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
2698 	NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
2700 	if (smc_state == NULL)
2703 	ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
2707 	ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
/*
 * Append derived "special" MC registers to the MC register table:
 * when MC_SEQ_MISC1 is present, synthesize MC_PMG_CMD_EMRS, _MRS and
 * (via MC_SEQ_RESERVE_M) _MRS1 entries whose per-entry data combine
 * the current register's upper half with bits taken from the source
 * entry. j tracks the next free slot past table->last.
 *
 * NOTE(review): this chunk elides the j++ / bounds bookkeeping between
 * cases and the GDDR5 conditional around the 0x100 OR — compare with
 * the upstream file before drawing conclusions about flow.
 */
2715 static int ni_set_mc_special_registers(struct radeon_device *rdev,
2716 				       struct ni_mc_reg_table *table)
2718 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2722 	for (i = 0, j = table->last; i < table->last; i++) {
2723 		switch (table->mc_reg_address[i].s1) {
2724 		case MC_SEQ_MISC1 >> 2:
2725 			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
	/* New entry: EMRS command, data = live reg high half +
	 * MISC1 high half shifted down. */
2727 			temp_reg = RREG32(MC_PMG_CMD_EMRS);
2728 			table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
2729 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2730 			for (k = 0; k < table->num_entries; k++)
2731 				table->mc_reg_table_entry[k].mc_data[j] =
2732 					((temp_reg & 0xffff0000)) |
2733 					((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
2735 			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
	/* Second derived entry: MRS command from MISC1 low half. */
2738 			temp_reg = RREG32(MC_PMG_CMD_MRS);
2739 			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
2740 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2741 			for(k = 0; k < table->num_entries; k++) {
2742 				table->mc_reg_table_entry[k].mc_data[j] =
2743 					(temp_reg & 0xffff0000) |
2744 					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2746 					table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
2749 			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2752 		case MC_SEQ_RESERVE_M >> 2:
	/* Derived MRS1 entry from the RESERVE_M low half. */
2753 			temp_reg = RREG32(MC_PMG_CMD_MRS1);
2754 			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
2755 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2756 			for (k = 0; k < table->num_entries; k++)
2757 				table->mc_reg_table_entry[k].mc_data[j] =
2758 					(temp_reg & 0xffff0000) |
2759 					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2761 			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
/*
 * Map an MC register (dword offset in_reg) to its "LP" shadow register,
 * used while reprogramming memory timings. Writes the shadow offset to
 * *out_reg for known registers; unknown registers fall through to the
 * (elided) default case.
 */
2774 static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
2779 	case MC_SEQ_RAS_TIMING >> 2:
2780 		*out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
2782 	case MC_SEQ_CAS_TIMING >> 2:
2783 		*out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
2785 	case MC_SEQ_MISC_TIMING >> 2:
2786 		*out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
2788 	case MC_SEQ_MISC_TIMING2 >> 2:
2789 		*out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
2791 	case MC_SEQ_RD_CTL_D0 >> 2:
2792 		*out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
2794 	case MC_SEQ_RD_CTL_D1 >> 2:
2795 		*out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
2797 	case MC_SEQ_WR_CTL_D0 >> 2:
2798 		*out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
2800 	case MC_SEQ_WR_CTL_D1 >> 2:
2801 		*out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
2803 	case MC_PMG_CMD_EMRS >> 2:
2804 		*out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2806 	case MC_PMG_CMD_MRS >> 2:
2807 		*out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2809 	case MC_PMG_CMD_MRS1 >> 2:
2810 		*out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2812 	case MC_SEQ_PMG_TIMING >> 2:
2813 		*out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
2815 	case MC_PMG_CMD_MRS2 >> 2:
2816 		*out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
/*
 * Set bit i of table->valid_flag for every register whose value differs
 * between at least two table entries — i.e. registers that actually
 * change with memory clock and must be uploaded to the SMC.
 */
2826 static void ni_set_valid_flag(struct ni_mc_reg_table *table)
2830 	for (i = 0; i < table->last; i++) {
2831 		for (j = 1; j < table->num_entries; j++) {
2832 			if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
2833 				table->valid_flag |= 1 << i;
/*
 * Fill each entry's s0 field with the LP shadow-register offset for its
 * s1 register when one exists (see ni_check_s0_mc_reg_index); registers
 * with no shadow keep s0 == s1.
 */
2840 static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
2845 	for (i = 0; i < table->last; i++)
2846 		table->mc_reg_address[i].s0 =
2847 			ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
2848 			address : table->mc_reg_address[i].s1;
/*
 * Copy the AtomBIOS MC register table into the driver's ni_mc_reg_table
 * representation, after bounds-checking the register count against
 * SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE and the entry count against
 * MAX_AC_TIMING_ENTRIES.
 */
2851 static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
2852 				      struct ni_mc_reg_table *ni_table)
2856 	if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2858 	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
2861 	for (i = 0; i < table->last; i++)
2862 		ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
2863 	ni_table->last = table->last;
2865 	for (i = 0; i < table->num_entries; i++) {
2866 		ni_table->mc_reg_table_entry[i].mclk_max =
2867 			table->mc_reg_table_entry[i].mclk_max;
2868 		for (j = 0; j < table->last; j++)
2869 			ni_table->mc_reg_table_entry[i].mc_data[j] =
2870 				table->mc_reg_table_entry[i].mc_data[j];
2872 	ni_table->num_entries = table->num_entries;
/*
 * Build the driver's MC register table for dynamic memory timing:
 * seed every LP shadow register with the current live value, read the
 * VBIOS table for this memory module, copy it, resolve shadow (s0)
 * indices, append the derived "special" registers, and mark which
 * registers actually vary (valid_flag).
 *
 * NOTE(review): error-path cleanup (kfree of the temporary table) is
 * not visible in this chunk.
 */
2877 static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
2879 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2881 	struct atom_mc_reg_table *table;
2882 	struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
2883 	u8 module_index = rv770_get_memory_module_index(rdev);
2885 	table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
	/* Copy each live MC register into its LP shadow. */
2889 	WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
2890 	WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
2891 	WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
2892 	WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
2893 	WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
2894 	WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
2895 	WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
2896 	WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
2897 	WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
2898 	WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
2899 	WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
2900 	WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
2901 	WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
2903 	ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
2908 	ret = ni_copy_vbios_mc_reg_table(table, ni_table);
2913 	ni_set_s0_mc_reg_index(ni_table);
2915 	ret = ni_set_mc_special_registers(rdev, ni_table);
2920 	ni_set_valid_flag(ni_table);
/*
 * Copy the (s0, s1) address pairs of every valid (clock-varying) MC
 * register into the SMC-format register table, byte-swapped, and record
 * the resulting count in mc_reg_table->last. Stops at the SMC array
 * capacity.
 */
2928 static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
2929 					 SMC_NIslands_MCRegisters *mc_reg_table)
2931 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2934 	for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
2935 		if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
2936 			if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2938 			mc_reg_table->address[i].s0 =
2939 				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
2940 			mc_reg_table->address[i].s1 =
2941 				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
2945 	mc_reg_table->last = (u8)i;
/*
 * Pack one MC register-table entry's data values into an SMC register
 * set, keeping only the registers flagged in valid_flag and converting
 * each value to big-endian for the SMC.
 */
2949 static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
2950 				    SMC_NIslands_MCRegisterSet *data,
2951 				    u32 num_entries, u32 valid_flag)
2955 	for (i = 0, j = 0; j < num_entries; j++) {
2956 		if (valid_flag & (1 << j)) {
2957 			data->value[i] = cpu_to_be32(entry->mc_data[j]);
/*
 * Select the MC register-table entry matching a performance level's
 * memory clock (first entry with mclk_max >= pl->mclk; falls back to
 * the last entry when the clock exceeds every mclk_max) and pack it
 * into SMC format.
 */
2963 static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
2964 						 struct rv7xx_pl *pl,
2965 						 SMC_NIslands_MCRegisterSet *mc_reg_table_data)
2967 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2970 	for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
2971 		if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
	/* No entry covers this clock: clamp to the last one. */
2975 	if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
2978 	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
2980 				ni_pi->mc_reg_table.last,
2981 				ni_pi->mc_reg_table.valid_flag);
/*
 * Fill one SMC MC-register data slot per performance level of the given
 * state, starting at the first driver-state slot.
 */
2984 static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
2985 					   struct radeon_ps *radeon_state,
2986 					   SMC_NIslands_MCRegisters *mc_reg_table)
2988 	struct ni_ps *state = ni_get_ps(radeon_state);
2991 	for (i = 0; i < state->performance_level_count; i++) {
2992 		ni_convert_mc_reg_table_entry_to_smc(rdev,
2993 						     &state->performance_levels[i],
2994 						     &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
/*
 * Build and upload the full SMC MC-register table at init time:
 * addresses of the valid registers, slot 0 for the boot state's first
 * level, slot 1 from register-table entry 0, and one slot per boot
 * performance level. The finished table is copied into SMC SRAM.
 */
2998 static int ni_populate_mc_reg_table(struct radeon_device *rdev,
2999 				    struct radeon_ps *radeon_boot_state)
3001 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3002 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3003 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3004 	struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
3005 	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3007 	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
	/* Tell the SMC which MC sequencer index to use. */
3009 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
3011 	ni_populate_mc_reg_addresses(rdev, mc_reg_table);
	/* Slot 0: boot state's lowest performance level. */
3013 	ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
3014 					     &mc_reg_table->data[0]);
	/* Slot 1: register-table entry 0 (lowest mclk settings). */
3016 	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
3017 				&mc_reg_table->data[1],
3018 				ni_pi->mc_reg_table.last,
3019 				ni_pi->mc_reg_table.valid_flag);
3021 	ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
3023 	return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
3025 				       sizeof(SMC_NIslands_MCRegisters),
/*
 * Refresh only the driver-state portion of the SMC MC-register table
 * for a new power state: rebuild the per-level data slots and copy just
 * those slots (starting at the first driver-state slot) into SMC SRAM.
 */
3029 static int ni_upload_mc_reg_table(struct radeon_device *rdev,
3030 				  struct radeon_ps *radeon_new_state)
3032 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3033 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3034 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3035 	struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
3036 	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3039 	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3041 	ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
3043 	address = eg_pi->mc_reg_table_start +
3044 		(u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
3046 	return rv770_copy_bytes_to_smc(rdev, address,
3047 				       (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
3048 				       sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
/*
 * Populate the CAC leakage lookup table by computing leakage in the
 * driver: for each (temperature, voltage) pair, evaluate the leakage
 * model, scale it for SMC units, and store it big-endian. Voltage
 * columns beyond the VDDC table are padded with the maximum observed
 * leakage. Temperature rows step in 8-degree units (millidegrees in t).
 */
3052 static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
3053 						   PP_NIslands_CACTABLES *cac_tables)
3055 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3056 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3058 	unsigned int i, j, table_size;
3060 	u32 smc_leakage, max_leakage = 0;
3063 	table_size = eg_pi->vddc_voltage_table.count;
3065 	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3066 		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3068 	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3070 	for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
3071 		for (j = 0; j < table_size; j++) {
	/* Temperature for row i: (i+1)*8 degrees, in millidegrees. */
3072 			t = (1000 * ((i + 1) * 8));
3074 			if (t < ni_pi->cac_data.leakage_minimum_temperature)
3075 				t = ni_pi->cac_data.leakage_minimum_temperature;
3077 			ni_calculate_leakage_for_v_and_t(rdev,
3078 							 &ni_pi->cac_data.leakage_coefficients,
3079 							 eg_pi->vddc_voltage_table.entries[j].value,
3081 							 ni_pi->cac_data.i_leakage,
3084 			smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
3085 			if (smc_leakage > max_leakage)
3086 				max_leakage = smc_leakage;
3088 			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
	/* Pad unused voltage columns with the worst-case leakage. */
3092 	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3093 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3094 			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
/*
 * Populate the CAC leakage lookup table from the VBIOS-supplied
 * per-voltage leakage table (no temperature model): each voltage column
 * repeats the same scaled leakage across all temperature rows, and
 * columns past the table are padded with the maximum observed value.
 */
3099 static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
3100 					    PP_NIslands_CACTABLES *cac_tables)
3102 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3103 	struct radeon_cac_leakage_table *leakage_table =
3104 		&rdev->pm.dpm.dyn_state.cac_leakage_table;
3105 	u32 i, j, table_size;
3106 	u32 smc_leakage, max_leakage = 0;
3112 	table_size = leakage_table->count;
	/* Use the smaller of the voltage and leakage table sizes. */
3114 	if (eg_pi->vddc_voltage_table.count != table_size)
3115 		table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
3116 			eg_pi->vddc_voltage_table.count : leakage_table->count;
3118 	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3119 		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3121 	if (table_size == 0)
3124 	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3126 	for (j = 0; j < table_size; j++) {
3127 		smc_leakage = leakage_table->entries[j].leakage;
3129 		if (smc_leakage > max_leakage)
3130 			max_leakage = smc_leakage;
3132 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3133 			cac_tables->cac_lkge_lut[i][j] =
3134 				cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
	/* Pad unused voltage columns with the worst-case leakage. */
3137 	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3138 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3139 			cac_tables->cac_lkge_lut[i][j] =
3140 				cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
/*
 * Build and upload the SMC CAC (capacitance/current) tables: program
 * the TID fields of CG_CAC_CTRL, seed the DC CAC and PCIe/BIF lookup
 * values from the chip-specific weight table, populate the leakage LUT
 * (driver-calculated model or simplified VBIOS table), and copy the
 * result into SMC SRAM. On failure, CAC and power containment are both
 * disabled for the session.
 */
3145 static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
3147 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3148 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3149 	PP_NIslands_CACTABLES *cac_tables = NULL;
3153 	if (ni_pi->enable_cac == false)
3156 	cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
3160 	reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
3161 	reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
3162 		TID_UNIT(ni_pi->cac_weights->tid_unit));
3163 	WREG32(CG_CAC_CTRL, reg);
3165 	for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
3166 		ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];
3168 	for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
3169 		cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];
	/* Seed the runtime CAC parameters from platform data/weights. */
3171 	ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
3172 	ni_pi->cac_data.pwr_const = 0;
3173 	ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
3174 	ni_pi->cac_data.bif_cac_value = 0;
3175 	ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
3176 	ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
3177 	ni_pi->cac_data.allow_ovrflw = 0;
3178 	ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
3179 	ni_pi->cac_data.num_win_tdp = 0;
3180 	ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;
3182 	if (ni_pi->driver_calculate_cac_leakage)
3183 		ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
3185 		ret = ni_init_simplified_leakage_table(rdev, cac_tables);
3190 	cac_tables->pwr_const = cpu_to_be32(ni_pi->cac_data.pwr_const);
3191 	cac_tables->dc_cacValue = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
3192 	cac_tables->bif_cacValue = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
3193 	cac_tables->AllowOvrflw = ni_pi->cac_data.allow_ovrflw;
3194 	cac_tables->MCWrWeight = ni_pi->cac_data.mc_wr_weight;
3195 	cac_tables->MCRdWeight = ni_pi->cac_data.mc_rd_weight;
3196 	cac_tables->numWin_TDP = ni_pi->cac_data.num_win_tdp;
3197 	cac_tables->l2numWin_TDP = ni_pi->cac_data.l2num_win_tdp;
3198 	cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;
3200 	ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
3201 				      sizeof(PP_NIslands_CACTABLES), pi->sram_end);
	/* Any failure above disables both CAC and power containment. */
3205 		ni_pi->enable_cac = false;
3206 		ni_pi->enable_power_containment = false;
/*
 * Program the hardware CAC manager's weight registers (CG_CAC_REGION_*)
 * from the chip-specific ni_cac_weights table. Each register is
 * read-modify-written: clear the weight fields for that block group
 * (TCP/TA, TCC, CB, DB, SX, XBR/SPI, LDS/SC, BIF/CP/PA/VGT, DC, UVD,
 * SQ), then OR in the table values. Finishes with the SQ CAC threshold
 * and the MC clock-gating config/dataport weights. Skipped when CAC is
 * disabled or no per-board configuration is required.
 *
 * NOTE(review): some mask lines appear elided in this chunk; the
 * pattern is uniform RMW per region register.
 */
3214 static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
3216 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3219 	if (!ni_pi->enable_cac ||
3220 	    !ni_pi->cac_configuration_required)
3223 	if (ni_pi->cac_weights == NULL)
	/* Region 1: texture cache/pipe and TA weights. */
3226 	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
3227 						      WEIGHT_TCP_SIG1_MASK |
3228 						      WEIGHT_TA_SIG_MASK);
3229 	reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
3230 		WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
3231 		WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
3232 	WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);
3234 	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
3235 						      WEIGHT_TCC_EN1_MASK |
3236 						      WEIGHT_TCC_EN2_MASK);
3237 	reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
3238 		WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
3239 		WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
3240 	WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);
	/* Region 2: color/depth blocks and shader export. */
3242 	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
3243 						      WEIGHT_CB_EN1_MASK |
3244 						      WEIGHT_CB_EN2_MASK |
3245 						      WEIGHT_CB_EN3_MASK);
3246 	reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
3247 		WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
3248 		WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
3249 		WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
3250 	WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);
3252 	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
3253 						      WEIGHT_DB_SIG1_MASK |
3254 						      WEIGHT_DB_SIG2_MASK |
3255 						      WEIGHT_DB_SIG3_MASK);
3256 	reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
3257 		WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
3258 		WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
3259 		WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
3260 	WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);
3262 	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
3263 						      WEIGHT_SXM_SIG1_MASK |
3264 						      WEIGHT_SXM_SIG2_MASK |
3265 						      WEIGHT_SXS_SIG0_MASK |
3266 						      WEIGHT_SXS_SIG1_MASK);
3267 	reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
3268 		WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
3269 		WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
3270 		WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
3271 		WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
3272 	WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);
	/* Region 3: crossbar and SPI. */
3274 	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
3277 						      WEIGHT_SPI_SIG0_MASK);
3278 	reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
3279 		WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
3280 		WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
3281 		WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
3282 	WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);
3284 	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
3285 						      WEIGHT_SPI_SIG2_MASK |
3286 						      WEIGHT_SPI_SIG3_MASK |
3287 						      WEIGHT_SPI_SIG4_MASK |
3288 						      WEIGHT_SPI_SIG5_MASK);
3289 	reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
3290 		WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
3291 		WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
3292 		WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
3293 		WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
3294 	WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);
	/* Region 4: LDS, SC, BIF, CP, PA, VGT, DC and UVD. */
3296 	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
3297 						      WEIGHT_LDS_SIG1_MASK |
3299 	reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
3300 		WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
3301 		WEIGHT_SC(ni_pi->cac_weights->weight_sc));
3302 	WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);
3304 	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
3306 						      WEIGHT_PA_SIG0_MASK |
3307 						      WEIGHT_PA_SIG1_MASK |
3308 						      WEIGHT_VGT_SIG0_MASK);
3309 	reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
3310 		WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
3311 		WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
3312 		WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
3313 		WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
3314 	WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);
3316 	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
3317 						      WEIGHT_VGT_SIG2_MASK |
3318 						      WEIGHT_DC_SIG0_MASK |
3319 						      WEIGHT_DC_SIG1_MASK |
3320 						      WEIGHT_DC_SIG2_MASK);
3321 	reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
3322 		WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
3323 		WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
3324 		WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
3325 		WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
3326 	WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);
3328 	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
3329 						      WEIGHT_UVD_SIG0_MASK |
3330 						      WEIGHT_UVD_SIG1_MASK |
3331 						      WEIGHT_SPARE0_MASK |
3332 						      WEIGHT_SPARE1_MASK);
3333 	reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
3334 		WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
3335 		WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
3336 		WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
3337 		WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
3338 	WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);
	/* Region 5: SQ VSP and GPR weights. */
3340 	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
3341 						      WEIGHT_SQ_VSP0_MASK);
3342 	reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
3343 		WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
3344 	WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);
3346 	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
3347 	reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
3348 	WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);
	/* Spare-signal override values for region 4. */
3350 	reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
3351 							OVR_VAL_SPARE_0_MASK |
3352 							OVR_MODE_SPARE_1_MASK |
3353 							OVR_VAL_SPARE_1_MASK);
3354 	reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
3355 		OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
3356 		OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
3357 		OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
3358 	WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);
3360 	reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
3363 	reg |= (VSP(ni_pi->cac_weights->vsp) |
3364 		VSP0(ni_pi->cac_weights->vsp0) |
3365 		GPR(ni_pi->cac_weights->gpr));
3366 	WREG32(SQ_CAC_THRESHOLD, reg);
	/* Enable memory-controller clock-gating paths and set the MC
	 * read/write weights on the dataport. */
3368 	reg = (MCDW_WR_ENABLE |
3373 	WREG32(MC_CG_CONFIG, reg);
3375 	reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
3376 	       WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
3378 	WREG32(MC_CG_DATAPORT, reg);
/*
 * Enable or disable CAC (power/leakage tracking) on the SMC by sending the
 * corresponding PPSMC messages.  On enable, optionally turns on long-term
 * averaging first; on disable, tears both down in reverse order.
 * NOTE(review): several lines (returns/braces) are elided in this extract,
 * so exact error propagation cannot be confirmed from here.
 */
3383 static int ni_enable_smc_cac(struct radeon_device *rdev,
3384 struct radeon_ps *radeon_new_state,
3387 struct ni_power_info *ni_pi = ni_get_pi(rdev);
3389 PPSMC_Result smc_result;
3391 if (ni_pi->enable_cac) {
/* CAC is only enabled for non-UVD states */
3393 if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
3394 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);
3396 if (ni_pi->support_cac_long_term_average) {
3397 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
/* if the SMC refuses long-term averaging, remember that and stop trying */
3398 if (PPSMC_Result_OK != smc_result)
3399 ni_pi->support_cac_long_term_average = false;
3402 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
3403 if (PPSMC_Result_OK != smc_result)
/* track whether CAC actually came up so disable is symmetric */
3406 ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
3408 } else if (ni_pi->cac_enabled) {
3409 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
3411 ni_pi->cac_enabled = false;
3413 if (ni_pi->support_cac_long_term_average) {
3414 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
3415 if (PPSMC_Result_OK != smc_result)
3416 ni_pi->support_cac_long_term_average = false;
/*
 * Forward a PCIe performance request (e.g. gen1/gen2 switch) to the
 * platform via ACPI.  Registers the device with the platform on first use
 * and deregisters on PCIE_PERF_REQ_REMOVE_REGISTRY.  Compiled out when
 * CONFIG_ACPI is not set (the non-ACPI return path is elided in this view).
 */
3424 static int ni_pcie_performance_request(struct radeon_device *rdev,
3425 u8 perf_req, bool advertise)
3427 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3429 #if defined(CONFIG_ACPI)
3430 if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
3431 (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
/* first request: tell the platform the device is ready */
3432 if (eg_pi->pcie_performance_request_registered == false)
3433 radeon_acpi_pcie_notify_device_ready(rdev);
3434 eg_pi->pcie_performance_request_registered = true;
3435 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3436 } else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
3437 eg_pi->pcie_performance_request_registered) {
3438 eg_pi->pcie_performance_request_registered = false;
3439 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
/*
 * Probe PCIE_LC_SPEED_CNTL to see whether the link partner ever sent /
 * supports gen2, cache the result in pi->pcie_gen2, and advertise gen2
 * capability to the platform.
 */
3445 static int ni_advertise_gen2_capability(struct radeon_device *rdev)
3447 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3450 tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
/* both "ever sent" and "supports" must be set for gen2 to be usable */
3452 if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3453 (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
3454 pi->pcie_gen2 = true;
3456 pi->pcie_gen2 = false;
/* result intentionally ignored: advertising is best-effort */
3459 ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
/*
 * Enable/disable hardware (BIF) controlled dynamic PCIe gen2 switching.
 * Only acts when the link partner supports gen2.  If the board did not
 * boot in gen2, the BIF client request register is reprogrammed first.
 * NOTE(review): the enable/disable branch structure has elided lines in
 * this extract; the two halves appear to mirror each other.
 */
3464 static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
3467 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3470 tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3472 if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3473 (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
3475 if (!pi->boot_in_gen2) {
3476 bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3477 bif |= CG_CLIENT_REQ(0xd);
3478 WREG32(CG_BIF_REQ_AND_RSP, bif);
/* hand voltage-if control to hardware and strap the link to gen2 */
3480 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3481 tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
3482 tmp |= LC_GEN2_EN_STRAP;
/* pulse CLR_FAILED_SPD_CHANGE_CNT: set, write, clear, write */
3484 tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
3485 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3487 tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
3488 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3490 if (!pi->boot_in_gen2) {
3491 bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3492 bif |= CG_CLIENT_REQ(0xd);
3493 WREG32(CG_BIF_REQ_AND_RSP, bif);
/* disable path: clear hw voltage-if control and the gen2 strap */
3495 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3496 tmp &= ~LC_GEN2_EN_STRAP;
3498 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
/*
 * Top-level toggle for dynamic PCIe gen2: program the BIF side, then
 * set/clear the ENABLE_GEN2PCIE bit in GENERAL_PWRMGT accordingly.
 */
3503 static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
3506 ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
3509 WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
3511 WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
/*
 * Reprogram UVD clocks (vclk/dclk) BEFORE the engine clock switch, but
 * only when the UVD clocks actually change and the new state's top sclk
 * is >= the current one (otherwise the after-switch variant handles it).
 */
3514 void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
3515 struct radeon_ps *new_ps,
3516 struct radeon_ps *old_ps)
3518 struct ni_ps *new_state = ni_get_ps(new_ps);
3519 struct ni_ps *current_state = ni_get_ps(old_ps);
/* nothing to do if UVD clocks are unchanged (early return elided here) */
3521 if ((new_ps->vclk == old_ps->vclk) &&
3522 (new_ps->dclk == old_ps->dclk))
3525 if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
3526 current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3529 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
/*
 * Counterpart to ni_set_uvd_clock_before_set_eng_clock(): reprogram UVD
 * clocks AFTER the engine clock switch, for the case where the new top
 * sclk is lower than the current one (note the inverted '<' comparison).
 */
3532 void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
3533 struct radeon_ps *new_ps,
3534 struct radeon_ps *old_ps)
3536 struct ni_ps *new_state = ni_get_ps(new_ps);
3537 struct ni_ps *current_state = ni_get_ps(old_ps);
3539 if ((new_ps->vclk == old_ps->vclk) &&
3540 (new_ps->dclk == old_ps->dclk))
3543 if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
3544 current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3547 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
/*
 * One-time ASIC setup for DPM: snapshot clock/arbiter registers, detect
 * memory type, optionally advertise gen2 via the platform, read gen2
 * link status, and enable ACPI power management.
 */
3550 void ni_dpm_setup_asic(struct radeon_device *rdev)
3552 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3554 ni_read_clock_registers(rdev);
3555 btc_read_arb_registers(rdev);
3556 rv770_get_memory_type(rdev);
3557 if (eg_pi->pcie_performance_request)
3558 ni_advertise_gen2_capability(rdev);
3559 rv770_get_pcie_gen2_status(rdev);
3560 rv770_enable_acpi_pm(rdev);
/*
 * Record @rps as the current power state.  Both the generic radeon_ps and
 * the NI-private ni_ps are copied by value; ps_priv is then re-pointed at
 * the private copy so the pair stays self-consistent.
 */
3563 void ni_update_current_ps(struct radeon_device *rdev,
3564 struct radeon_ps *rps)
3566 struct ni_ps *new_ps = ni_get_ps(rps);
3567 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3568 struct ni_power_info *ni_pi = ni_get_pi(rdev);
3570 eg_pi->current_rps = *rps;
3571 ni_pi->current_ps = *new_ps;
3572 eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
/*
 * Record @rps as the requested power state; mirror image of
 * ni_update_current_ps() operating on the requested_* fields.
 */
3575 void ni_update_requested_ps(struct radeon_device *rdev,
3576 struct radeon_ps *rps)
3578 struct ni_ps *new_ps = ni_get_ps(rps);
3579 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3580 struct ni_power_info *ni_pi = ni_get_pi(rdev);
3582 eg_pi->requested_rps = *rps;
3583 ni_pi->requested_ps = *new_ps;
3584 eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
/*
 * Bring up dynamic power management: program clock-gating defaults,
 * voltage tables, SMC firmware and tables, CAC, thermal interrupts, and
 * finally start the SMC/DPM state machine on the boot power state.
 * NOTE(review): most error-check lines (if (ret) { ... return ret; })
 * are elided in this extract — only the DRM_ERROR strings remain visible.
 */
3587 int ni_dpm_enable(struct radeon_device *rdev)
3589 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3590 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3591 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
/* program clock-gating defaults before anything else */
3594 if (pi->gfx_clock_gating)
3595 ni_cg_clockgating_default(rdev);
3596 if (btc_dpm_enabled(rdev))
3598 if (pi->mg_clock_gating)
3599 ni_mg_clockgating_default(rdev);
3600 if (eg_pi->ls_clock_gating)
3601 ni_ls_clockgating_default(rdev);
3602 if (pi->voltage_control) {
3603 rv770_enable_voltage_control(rdev, true);
3604 ret = cypress_construct_voltage_tables(rdev);
3606 DRM_ERROR("cypress_construct_voltage_tables failed\n");
/* dynamic AC timing is optional: fall back to static on failure */
3610 if (eg_pi->dynamic_ac_timing) {
3611 ret = ni_initialize_mc_reg_table(rdev);
3613 eg_pi->dynamic_ac_timing = false;
3616 cypress_enable_spread_spectrum(rdev, true);
3617 if (pi->thermal_protection)
3618 rv770_enable_thermal_protection(rdev, true);
/* program the various SMC timing/parameter registers */
3619 rv770_setup_bsp(rdev);
3620 rv770_program_git(rdev);
3621 rv770_program_tp(rdev);
3622 rv770_program_tpp(rdev);
3623 rv770_program_sstp(rdev);
3624 cypress_enable_display_gap(rdev);
3625 rv770_program_vc(rdev);
3626 if (pi->dynamic_pcie_gen2)
3627 ni_enable_dynamic_pcie_gen2(rdev, true);
/* upload SMC firmware and initialize its tables */
3628 ret = rv770_upload_firmware(rdev);
3630 DRM_ERROR("rv770_upload_firmware failed\n");
3633 ret = ni_process_firmware_header(rdev);
3635 DRM_ERROR("ni_process_firmware_header failed\n");
3638 ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
3640 DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
3643 ret = ni_init_smc_table(rdev);
3645 DRM_ERROR("ni_init_smc_table failed\n");
3648 ret = ni_init_smc_spll_table(rdev);
3650 DRM_ERROR("ni_init_smc_spll_table failed\n");
3653 ret = ni_init_arb_table_index(rdev);
3655 DRM_ERROR("ni_init_arb_table_index failed\n");
3658 if (eg_pi->dynamic_ac_timing) {
3659 ret = ni_populate_mc_reg_table(rdev, boot_ps);
3661 DRM_ERROR("ni_populate_mc_reg_table failed\n");
/* CAC and TDP limit setup */
3665 ret = ni_initialize_smc_cac_tables(rdev);
3667 DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
3670 ret = ni_initialize_hardware_cac_manager(rdev);
3672 DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
3675 ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
3677 DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
3680 ni_program_response_times(rdev);
/* start the SMC and kick off DPM */
3681 r7xx_start_smc(rdev);
3682 ret = cypress_notify_smc_display_change(rdev, false);
3684 DRM_ERROR("cypress_notify_smc_display_change failed\n");
3687 cypress_enable_sclk_control(rdev, true);
3688 if (eg_pi->memory_transition)
3689 cypress_enable_mclk_control(rdev, true);
3690 cypress_start_dpm(rdev);
3691 if (pi->gfx_clock_gating)
3692 ni_gfx_clockgating_enable(rdev, true);
3693 if (pi->mg_clock_gating)
3694 ni_mg_clockgating_enable(rdev, true);
3695 if (eg_pi->ls_clock_gating)
3696 ni_ls_clockgating_enable(rdev, true);
/* hook up thermal interrupts if an internal sensor is present */
3698 if (rdev->irq.installed &&
3699 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
3700 PPSMC_Result result;
3702 ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, 0xff * 1000);
3705 rdev->irq.dpm_thermal = true;
3706 radeon_irq_set(rdev);
3707 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
3709 if (result != PPSMC_Result_OK)
3710 DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
3713 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
/* the boot state is now the current state */
3715 ni_update_current_ps(rdev, boot_ps);
/*
 * Tear down DPM in roughly the reverse order of ni_dpm_enable():
 * disable power containment/CAC/spread-spectrum/gen2, drop thermal
 * interrupts, disable clock gating, reset the SMC to defaults and
 * switch the memory arbiter back to F0.
 */
3720 void ni_dpm_disable(struct radeon_device *rdev)
3722 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3723 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3724 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
/* nothing to do if DPM was never enabled (early return elided here) */
3726 if (!btc_dpm_enabled(rdev))
3728 rv770_clear_vc(rdev);
3729 if (pi->thermal_protection)
3730 rv770_enable_thermal_protection(rdev, false);
3731 ni_enable_power_containment(rdev, boot_ps, false);
3732 ni_enable_smc_cac(rdev, boot_ps, false);
3733 cypress_enable_spread_spectrum(rdev, false);
3734 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
3735 if (pi->dynamic_pcie_gen2)
3736 ni_enable_dynamic_pcie_gen2(rdev, false);
3738 if (rdev->irq.installed &&
3739 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
3740 rdev->irq.dpm_thermal = false;
3741 radeon_irq_set(rdev);
3744 if (pi->gfx_clock_gating)
3745 ni_gfx_clockgating_enable(rdev, false);
3746 if (pi->mg_clock_gating)
3747 ni_mg_clockgating_enable(rdev, false);
3748 if (eg_pi->ls_clock_gating)
3749 ni_ls_clockgating_enable(rdev, false);
3751 btc_reset_to_default(rdev);
3753 ni_force_switch_to_arb_f0(rdev);
3755 ni_update_current_ps(rdev, boot_ps);
/*
 * Re-apply TDP limits for the requested power state: restrict performance
 * levels, halt the SMC, repopulate the TDP limits, resume the SMC, and
 * re-assert the software state.  Error checks between the calls are
 * elided in this extract.
 */
3758 static int ni_power_control_set_level(struct radeon_device *rdev)
3760 struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
3763 ret = ni_restrict_performance_levels_before_switch(rdev);
3766 ret = rv770_halt_smc(rdev);
3769 ret = ni_populate_smc_tdp_limits(rdev, new_ps);
3772 ret = rv770_resume_smc(rdev);
3775 ret = rv770_set_sw_state(rdev);
/*
 * Pre-switch hook: copy the requested state (so adjustments don't touch
 * the caller's table entry), stash it, and apply NI-specific state
 * adjustment rules to the private copy.
 */
3782 int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
3784 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3785 struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
3786 struct radeon_ps *new_ps = &requested_ps;
3788 ni_update_requested_ps(rdev, new_ps);
3790 ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
/*
 * Perform the actual power state switch.  Sequence: restrict levels,
 * set UVD clocks (pre), disable power containment and CAC around the
 * switch, halt the SMC, upload the new software state (and MC reg /
 * memory timing tables if dynamic AC timing is on), resume the SMC,
 * commit, set UVD clocks (post), re-enable CAC and power containment,
 * apply TDP level, and force the performance level back to AUTO.
 * NOTE(review): the `if (ret) { ... return ret; }` lines are elided in
 * this extract — only the DRM_ERROR strings remain.
 */
3795 int ni_dpm_set_power_state(struct radeon_device *rdev)
3797 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3798 struct radeon_ps *new_ps = &eg_pi->requested_rps;
3799 struct radeon_ps *old_ps = &eg_pi->current_rps;
3802 ret = ni_restrict_performance_levels_before_switch(rdev);
3804 DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
3807 ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
/* containment and CAC must be off while the state is switched */
3808 ret = ni_enable_power_containment(rdev, new_ps, false);
3810 DRM_ERROR("ni_enable_power_containment failed\n");
3813 ret = ni_enable_smc_cac(rdev, new_ps, false);
3815 DRM_ERROR("ni_enable_smc_cac failed\n");
3818 ret = rv770_halt_smc(rdev);
3820 DRM_ERROR("rv770_halt_smc failed\n");
3823 if (eg_pi->smu_uvd_hs)
3824 btc_notify_uvd_to_smc(rdev, new_ps);
3825 ret = ni_upload_sw_state(rdev, new_ps);
3827 DRM_ERROR("ni_upload_sw_state failed\n");
3830 if (eg_pi->dynamic_ac_timing) {
3831 ret = ni_upload_mc_reg_table(rdev, new_ps);
3833 DRM_ERROR("ni_upload_mc_reg_table failed\n");
3837 ret = ni_program_memory_timing_parameters(rdev, new_ps);
3839 DRM_ERROR("ni_program_memory_timing_parameters failed\n");
3842 ret = rv770_resume_smc(rdev);
3844 DRM_ERROR("rv770_resume_smc failed\n");
3847 ret = rv770_set_sw_state(rdev);
3849 DRM_ERROR("rv770_set_sw_state failed\n");
3852 ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
/* re-enable CAC/containment for the new state */
3853 ret = ni_enable_smc_cac(rdev, new_ps, true);
3855 DRM_ERROR("ni_enable_smc_cac failed\n");
3858 ret = ni_enable_power_containment(rdev, new_ps, true);
3860 DRM_ERROR("ni_enable_power_containment failed\n");
3865 ret = ni_power_control_set_level(rdev);
3867 DRM_ERROR("ni_power_control_set_level failed\n");
3871 ret = ni_dpm_force_performance_level(rdev, RADEON_DPM_FORCED_LEVEL_AUTO);
3873 DRM_ERROR("ni_dpm_force_performance_level failed\n");
/*
 * Post-switch hook: promote the requested state to be the current state.
 */
3880 void ni_dpm_post_set_power_state(struct radeon_device *rdev)
3882 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3883 struct radeon_ps *new_ps = &eg_pi->requested_rps;
3885 ni_update_current_ps(rdev, new_ps);
/*
 * Reset to a safe state: restrict performance levels, then force the
 * boot state.
 */
3888 void ni_dpm_reset_asic(struct radeon_device *rdev)
3890 ni_restrict_performance_levels_before_switch(rdev);
3891 rv770_set_boot_state(rdev);
/*
 * Overlay unions used to interpret the ATOM PowerPlay tables from the
 * VBIOS at the various table revisions.
 * NOTE(review): the opening line of the first union (power_info) is not
 * visible in this extract; only its members appear below.
 */
3895 struct _ATOM_POWERPLAY_INFO info;
3896 struct _ATOM_POWERPLAY_INFO_V2 info_2;
3897 struct _ATOM_POWERPLAY_INFO_V3 info_3;
3898 struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
3899 struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
3900 struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
/* per-level clock info layouts across ASIC families */
3903 union pplib_clock_info {
3904 struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
3905 struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
3906 struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
3907 struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
/* power state entry layouts (v1 vs v2 state arrays) */
3910 union pplib_power_state {
3911 struct _ATOM_PPLIB_STATE v1;
3912 struct _ATOM_PPLIB_STATE_V2 v2;
/*
 * Decode the non-clock portion of a PowerPlay state entry into @rps:
 * caps/classification flags and the UVD vclk/dclk (explicit from the
 * table on newer revisions, defaults for older UVD states).  Also
 * registers the boot and UVD states with the dpm core.
 */
3915 static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
3916 struct radeon_ps *rps,
3917 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
/* table fields are little-endian in the VBIOS image */
3920 rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
3921 rps->class = le16_to_cpu(non_clock_info->usClassification);
3922 rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
3924 if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
3925 rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
3926 rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
3927 } else if (r600_is_uvd_state(rps->class, rps->class2)) {
/* old tables don't carry UVD clocks; use driver defaults */
3928 rps->vclk = RV770_DEFAULT_VCLK_FREQ;
3929 rps->dclk = RV770_DEFAULT_DCLK_FREQ;
3935 if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
3936 rdev->pm.dpm.boot_ps = rps;
3937 if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
3938 rdev->pm.dpm.uvd_ps = rps;
/*
 * Decode one per-level clock-info entry (evergreen layout) into
 * performance level @index of the NI state, patching up leaked/invalid
 * vddc values, ACPI/ULV/boot classifications, and the driver's
 * min/max vddc and max-AC-clock bookkeeping.
 */
3941 static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
3942 struct radeon_ps *rps, int index,
3943 union pplib_clock_info *clock_info)
3945 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3946 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3947 struct ni_ps *ps = ni_get_ps(rps);
3949 struct rv7xx_pl *pl = &ps->performance_levels[index];
3951 ps->performance_level_count = index + 1;
/* clocks are split low-16/high-8 in the table */
3953 pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
3954 pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
3955 pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
3956 pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
3958 pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
3959 pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
3960 pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
3962 /* patch up vddc if necessary */
3963 if (pl->vddc == 0xff01) {
3964 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0)
/* remember the ACPI state's voltages and pcie gen for later use */
3968 if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
3969 pi->acpi_vddc = pl->vddc;
3970 eg_pi->acpi_vddci = pl->vddci;
3971 if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
3972 pi->acpi_pcie_gen2 = true;
3974 pi->acpi_pcie_gen2 = false;
3977 if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
3978 eg_pi->ulv.supported = true;
/* track the table-wide vddc range */
3982 if (pi->min_vddc_in_table > pl->vddc)
3983 pi->min_vddc_in_table = pl->vddc;
3985 if (pi->max_vddc_in_table < pl->vddc)
3986 pi->max_vddc_in_table = pl->vddc;
3988 /* patch up boot state */
3989 if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
3990 u16 vddc, vddci, mvdd;
3991 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
3992 pl->mclk = rdev->clock.default_mclk;
3993 pl->sclk = rdev->clock.default_sclk;
/* performance states define the max clocks/voltages on AC power */
3998 if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
3999 ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
4000 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
4001 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
4002 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
4003 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
/*
 * Locate the PowerPlay table in the ATOM BIOS and build the driver's
 * power state array (rdev->pm.dpm.ps) from it, parsing the non-clock
 * info and each per-level clock info for every state.
 * NOTE(review): several error-return and bounds lines are elided in this
 * extract (e.g. the -ENOMEM returns after the kzalloc failures).
 */
4007 static int ni_parse_power_table(struct radeon_device *rdev)
4009 struct radeon_mode_info *mode_info = &rdev->mode_info;
4010 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
4011 union pplib_power_state *power_state;
4013 union pplib_clock_info *clock_info;
4014 union power_info *power_info;
4015 int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
4020 if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
4021 &frev, &crev, &data_offset))
/* table lives at data_offset inside the BIOS image */
4023 power_info = (union power_info *)((uint8_t*)mode_info->atom_context->bios + data_offset);
4025 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
4026 power_info->pplib.ucNumStates, GFP_KERNEL);
4027 if (!rdev->pm.dpm.ps)
4029 rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps);
4030 rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime);
4031 rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime);
4033 for (i = 0; i < power_info->pplib.ucNumStates; i++) {
/* state entries are fixed-stride records after usStateArrayOffset */
4034 power_state = (union pplib_power_state *)
4035 ((uint8_t*)mode_info->atom_context->bios + data_offset +
4036 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
4037 i * power_info->pplib.ucStateEntrySize);
4038 non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
4039 ((uint8_t*)mode_info->atom_context->bios + data_offset +
4040 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
4041 (power_state->v1.ucNonClockStateIndex *
4042 power_info->pplib.ucNonClockSize));
/* entry size minus 1 = number of clock levels in this state */
4043 if (power_info->pplib.ucStateEntrySize - 1) {
4044 ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
4046 kfree(rdev->pm.dpm.ps);
4049 rdev->pm.dpm.ps[i].ps_priv = ps;
4050 ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
4052 power_info->pplib.ucNonClockSize);
4053 for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
4054 clock_info = (union pplib_clock_info *)
4055 ((uint8_t*)mode_info->atom_context->bios + data_offset +
4056 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
4057 (power_state->v1.ucClockStateIndices[j] *
4058 power_info->pplib.ucClockInfoSize));
4059 ni_parse_pplib_clock_info(rdev,
4060 &rdev->pm.dpm.ps[i], j,
4065 rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
/*
 * Allocate and initialize all NI DPM driver state: parse the BIOS power
 * tables, seed a default vddc-vs-dispclk dependency table, configure
 * clock thresholds, gating/feature flags, CAC leakage coefficients, and
 * pick the per-SKU CAC weight table by PCI device ID.
 * NOTE(review): allocation-failure returns and some `else` lines are
 * elided in this extract.
 */
4069 int ni_dpm_init(struct radeon_device *rdev)
4071 struct rv7xx_power_info *pi;
4072 struct evergreen_power_info *eg_pi;
4073 struct ni_power_info *ni_pi;
4074 struct atom_clock_dividers dividers;
4077 ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
4080 rdev->pm.dpm.priv = ni_pi;
4084 rv770_get_max_vddc(rdev);
4086 eg_pi->ulv.supported = false;
4088 eg_pi->acpi_vddci = 0;
4089 pi->min_vddc_in_table = 0;
4090 pi->max_vddc_in_table = 0;
4092 ret = ni_parse_power_table(rdev);
4095 ret = r600_parse_extended_power_table(rdev);
/* default 4-entry vddc vs. display clock dependency table */
4099 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
4100 kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL);
4101 if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
4102 r600_free_extended_power_table(rdev);
4105 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
4106 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
4107 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
4108 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
4109 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
4110 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
4111 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
4112 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
4113 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
4115 ni_patch_dependency_tables_based_on_leakage(rdev);
/* fall back to defaults if the BIOS supplied no response times */
4117 if (rdev->pm.dpm.voltage_response_time == 0)
4118 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
4119 if (rdev->pm.dpm.backbias_response_time == 0)
4120 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
4122 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
4123 0, false, ÷rs);
4125 pi->ref_div = dividers.ref_div + 1;
4127 pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
4129 pi->rlp = RV770_RLP_DFLT;
4130 pi->rmp = RV770_RMP_DFLT;
4131 pi->lhp = RV770_LHP_DFLT;
4132 pi->lmp = RV770_LMP_DFLT;
/* ats[0]: normal activity thresholds; ats[1]: UVD-specific thresholds */
4134 eg_pi->ats[0].rlp = RV770_RLP_DFLT;
4135 eg_pi->ats[0].rmp = RV770_RMP_DFLT;
4136 eg_pi->ats[0].lhp = RV770_LHP_DFLT;
4137 eg_pi->ats[0].lmp = RV770_LMP_DFLT;
4139 eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
4140 eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
4141 eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
4142 eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;
4144 eg_pi->smu_uvd_hs = true;
/* 0x6707 boards get higher mclk mode-switch thresholds */
4146 if (rdev->ddev->pci_device == 0x6707) {
4147 pi->mclk_strobe_mode_threshold = 55000;
4148 pi->mclk_edc_enable_threshold = 55000;
4149 eg_pi->mclk_edc_wr_enable_threshold = 55000;
4151 pi->mclk_strobe_mode_threshold = 40000;
4152 pi->mclk_edc_enable_threshold = 40000;
4153 eg_pi->mclk_edc_wr_enable_threshold = 40000;
4155 ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;
/* voltage control capability is discovered from the BIOS */
4157 pi->voltage_control =
4158 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
4161 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
4163 eg_pi->vddci_control =
4164 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
4166 rv770_get_engine_memory_ss(rdev);
4168 pi->asi = RV770_ASI_DFLT;
4169 pi->pasi = CYPRESS_HASI_DFLT;
4170 pi->vrc = CYPRESS_VRC_DFLT;
/* feature flags: which power-saving mechanisms this driver enables */
4172 pi->power_gating = false;
4174 pi->gfx_clock_gating = true;
4176 pi->mg_clock_gating = true;
4177 pi->mgcgtssm = true;
4178 eg_pi->ls_clock_gating = false;
4179 eg_pi->sclk_deep_sleep = false;
4181 pi->dynamic_pcie_gen2 = true;
4183 if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
4184 pi->thermal_protection = true;
4186 pi->thermal_protection = false;
4188 pi->display_gap = true;
4194 eg_pi->dynamic_ac_timing = true;
4197 eg_pi->light_sleep = true;
4198 eg_pi->memory_transition = true;
4199 #if defined(CONFIG_ACPI)
4200 eg_pi->pcie_performance_request =
4201 radeon_acpi_is_pcie_performance_request_supported(rdev);
4203 eg_pi->pcie_performance_request = false;
4206 eg_pi->dll_default_on = false;
4208 eg_pi->sclk_deep_sleep = false;
4210 pi->mclk_stutter_mode_threshold = 0;
4212 pi->sram_end = SMC_RAM_END;
4214 rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
4215 rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
4216 rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
4217 rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
4218 rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
4219 rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
4220 rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
4221 rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;
/* CAC leakage model coefficients (fixed for Cayman) */
4223 ni_pi->cac_data.leakage_coefficients.at = 516;
4224 ni_pi->cac_data.leakage_coefficients.bt = 18;
4225 ni_pi->cac_data.leakage_coefficients.av = 51;
4226 ni_pi->cac_data.leakage_coefficients.bv = 2957;
/* select CAC weight table by SKU (XT / Pro / LE) */
4228 switch (rdev->ddev->pci_device) {
4234 ni_pi->cac_weights = &cac_weights_cayman_xt;
4241 ni_pi->cac_weights = &cac_weights_cayman_pro;
4248 ni_pi->cac_weights = &cac_weights_cayman_le;
4252 if (ni_pi->cac_weights->enable_power_containment_by_default) {
4253 ni_pi->enable_power_containment = true;
4254 ni_pi->enable_cac = true;
4255 ni_pi->enable_sq_ramping = true;
4257 ni_pi->enable_power_containment = false;
4258 ni_pi->enable_cac = false;
4259 ni_pi->enable_sq_ramping = false;
4262 ni_pi->driver_calculate_cac_leakage = false;
4263 ni_pi->cac_configuration_required = true;
4265 if (ni_pi->cac_configuration_required) {
4266 ni_pi->support_cac_long_term_average = true;
4267 ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
4268 ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
4270 ni_pi->support_cac_long_term_average = false;
4271 ni_pi->lta_window_size = 0;
4272 ni_pi->lts_truncate = 0;
4275 ni_pi->use_power_boost_limit = true;
/*
 * Free everything ni_dpm_init()/ni_parse_power_table() allocated:
 * per-state private data, the state array, the private info struct,
 * the dispclk dependency table, and the extended power tables.
 */
4280 void ni_dpm_fini(struct radeon_device *rdev)
4284 for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
4285 kfree(rdev->pm.dpm.ps[i].ps_priv);
4287 kfree(rdev->pm.dpm.ps);
4288 kfree(rdev->pm.dpm.priv);
4289 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
4290 r600_free_extended_power_table(rdev);
/*
 * Dump a power state to the kernel log: class/cap info, UVD clocks, and
 * each performance level (with pcie gen on SI-class parts and newer).
 */
4293 void ni_dpm_print_power_state(struct radeon_device *rdev,
4294 struct radeon_ps *rps)
4296 struct ni_ps *ps = ni_get_ps(rps);
4297 struct rv7xx_pl *pl;
4300 r600_dpm_print_class_info(rps->class, rps->class2);
4301 r600_dpm_print_cap_info(rps->caps);
4302 printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4303 for (i = 0; i < ps->performance_level_count; i++) {
4304 pl = &ps->performance_levels[i];
/* SI (Tahiti+) parts track a per-level pcie gen; print it there */
4305 if (rdev->family >= CHIP_TAHITI)
4306 printk("\t\tpower level %d sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
4307 i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
4309 printk("\t\tpower level %d sclk: %u mclk: %u vddc: %u vddci: %u\n",
4310 i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4312 r600_dpm_print_ps_status(rdev, rps);
/*
 * Debugfs helper: read the hardware's current profile index from
 * TARGET_AND_CURRENT_PROFILE_INDEX and print the matching performance
 * level of the current power state (or a note if the index is bogus).
 */
4315 void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
4318 struct radeon_ps *rps = rdev->pm.dpm.current_ps;
4319 struct ni_ps *ps = ni_get_ps(rps);
4320 struct rv7xx_pl *pl;
4322 (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4323 CURRENT_STATE_INDEX_SHIFT;
/* hardware index may exceed the sw state's level count; guard it */
4325 if (current_index >= ps->performance_level_count) {
4326 seq_printf(m, "invalid dpm profile %d\n", current_index);
4328 pl = &ps->performance_levels[current_index];
4329 seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4330 seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u vddci: %u\n",
4331 current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
/*
 * Return the engine clock of the requested state: lowest level when
 * @low, otherwise the highest level.
 */
4335 u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
4337 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4338 struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4341 return requested_state->performance_levels[0].sclk;
4343 return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
/*
 * Return the memory clock of the requested state: lowest level when
 * @low, otherwise the highest level.  Mirrors ni_dpm_get_sclk().
 */
4346 u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
4348 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4349 struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4352 return requested_state->performance_levels[0].mclk;
4354 return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;