/*
 * Copyright 2012 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
26 #include "radeon_asic.h"
31 #include <linux/math64.h>
32 #include <linux/seq_file.h>
34 #define MC_CG_ARB_FREQ_F0 0x0a
35 #define MC_CG_ARB_FREQ_F1 0x0b
36 #define MC_CG_ARB_FREQ_F2 0x0c
37 #define MC_CG_ARB_FREQ_F3 0x0d
39 #define SMC_RAM_END 0xC000
41 static const struct ni_cac_weights cac_weights_cayman_xt =
105 { 0, 0, 0, 0, 0, 0, 0, 0 },
110 static const struct ni_cac_weights cac_weights_cayman_pro =
174 { 0, 0, 0, 0, 0, 0, 0, 0 },
179 static const struct ni_cac_weights cac_weights_cayman_le =
243 { 0, 0, 0, 0, 0, 0, 0, 0 },
248 #define NISLANDS_MGCG_SEQUENCE 300
250 static const u32 cayman_cgcg_cgls_default[] =
252 0x000008f8, 0x00000010, 0xffffffff,
253 0x000008fc, 0x00000000, 0xffffffff,
254 0x000008f8, 0x00000011, 0xffffffff,
255 0x000008fc, 0x00000000, 0xffffffff,
256 0x000008f8, 0x00000012, 0xffffffff,
257 0x000008fc, 0x00000000, 0xffffffff,
258 0x000008f8, 0x00000013, 0xffffffff,
259 0x000008fc, 0x00000000, 0xffffffff,
260 0x000008f8, 0x00000014, 0xffffffff,
261 0x000008fc, 0x00000000, 0xffffffff,
262 0x000008f8, 0x00000015, 0xffffffff,
263 0x000008fc, 0x00000000, 0xffffffff,
264 0x000008f8, 0x00000016, 0xffffffff,
265 0x000008fc, 0x00000000, 0xffffffff,
266 0x000008f8, 0x00000017, 0xffffffff,
267 0x000008fc, 0x00000000, 0xffffffff,
268 0x000008f8, 0x00000018, 0xffffffff,
269 0x000008fc, 0x00000000, 0xffffffff,
270 0x000008f8, 0x00000019, 0xffffffff,
271 0x000008fc, 0x00000000, 0xffffffff,
272 0x000008f8, 0x0000001a, 0xffffffff,
273 0x000008fc, 0x00000000, 0xffffffff,
274 0x000008f8, 0x0000001b, 0xffffffff,
275 0x000008fc, 0x00000000, 0xffffffff,
276 0x000008f8, 0x00000020, 0xffffffff,
277 0x000008fc, 0x00000000, 0xffffffff,
278 0x000008f8, 0x00000021, 0xffffffff,
279 0x000008fc, 0x00000000, 0xffffffff,
280 0x000008f8, 0x00000022, 0xffffffff,
281 0x000008fc, 0x00000000, 0xffffffff,
282 0x000008f8, 0x00000023, 0xffffffff,
283 0x000008fc, 0x00000000, 0xffffffff,
284 0x000008f8, 0x00000024, 0xffffffff,
285 0x000008fc, 0x00000000, 0xffffffff,
286 0x000008f8, 0x00000025, 0xffffffff,
287 0x000008fc, 0x00000000, 0xffffffff,
288 0x000008f8, 0x00000026, 0xffffffff,
289 0x000008fc, 0x00000000, 0xffffffff,
290 0x000008f8, 0x00000027, 0xffffffff,
291 0x000008fc, 0x00000000, 0xffffffff,
292 0x000008f8, 0x00000028, 0xffffffff,
293 0x000008fc, 0x00000000, 0xffffffff,
294 0x000008f8, 0x00000029, 0xffffffff,
295 0x000008fc, 0x00000000, 0xffffffff,
296 0x000008f8, 0x0000002a, 0xffffffff,
297 0x000008fc, 0x00000000, 0xffffffff,
298 0x000008f8, 0x0000002b, 0xffffffff,
299 0x000008fc, 0x00000000, 0xffffffff
301 #define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32))
303 static const u32 cayman_cgcg_cgls_disable[] =
305 0x000008f8, 0x00000010, 0xffffffff,
306 0x000008fc, 0xffffffff, 0xffffffff,
307 0x000008f8, 0x00000011, 0xffffffff,
308 0x000008fc, 0xffffffff, 0xffffffff,
309 0x000008f8, 0x00000012, 0xffffffff,
310 0x000008fc, 0xffffffff, 0xffffffff,
311 0x000008f8, 0x00000013, 0xffffffff,
312 0x000008fc, 0xffffffff, 0xffffffff,
313 0x000008f8, 0x00000014, 0xffffffff,
314 0x000008fc, 0xffffffff, 0xffffffff,
315 0x000008f8, 0x00000015, 0xffffffff,
316 0x000008fc, 0xffffffff, 0xffffffff,
317 0x000008f8, 0x00000016, 0xffffffff,
318 0x000008fc, 0xffffffff, 0xffffffff,
319 0x000008f8, 0x00000017, 0xffffffff,
320 0x000008fc, 0xffffffff, 0xffffffff,
321 0x000008f8, 0x00000018, 0xffffffff,
322 0x000008fc, 0xffffffff, 0xffffffff,
323 0x000008f8, 0x00000019, 0xffffffff,
324 0x000008fc, 0xffffffff, 0xffffffff,
325 0x000008f8, 0x0000001a, 0xffffffff,
326 0x000008fc, 0xffffffff, 0xffffffff,
327 0x000008f8, 0x0000001b, 0xffffffff,
328 0x000008fc, 0xffffffff, 0xffffffff,
329 0x000008f8, 0x00000020, 0xffffffff,
330 0x000008fc, 0x00000000, 0xffffffff,
331 0x000008f8, 0x00000021, 0xffffffff,
332 0x000008fc, 0x00000000, 0xffffffff,
333 0x000008f8, 0x00000022, 0xffffffff,
334 0x000008fc, 0x00000000, 0xffffffff,
335 0x000008f8, 0x00000023, 0xffffffff,
336 0x000008fc, 0x00000000, 0xffffffff,
337 0x000008f8, 0x00000024, 0xffffffff,
338 0x000008fc, 0x00000000, 0xffffffff,
339 0x000008f8, 0x00000025, 0xffffffff,
340 0x000008fc, 0x00000000, 0xffffffff,
341 0x000008f8, 0x00000026, 0xffffffff,
342 0x000008fc, 0x00000000, 0xffffffff,
343 0x000008f8, 0x00000027, 0xffffffff,
344 0x000008fc, 0x00000000, 0xffffffff,
345 0x000008f8, 0x00000028, 0xffffffff,
346 0x000008fc, 0x00000000, 0xffffffff,
347 0x000008f8, 0x00000029, 0xffffffff,
348 0x000008fc, 0x00000000, 0xffffffff,
349 0x000008f8, 0x0000002a, 0xffffffff,
350 0x000008fc, 0x00000000, 0xffffffff,
351 0x000008f8, 0x0000002b, 0xffffffff,
352 0x000008fc, 0x00000000, 0xffffffff,
353 0x00000644, 0x000f7902, 0x001f4180,
354 0x00000644, 0x000f3802, 0x001f4180
356 #define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32))
358 static const u32 cayman_cgcg_cgls_enable[] =
360 0x00000644, 0x000f7882, 0x001f4080,
361 0x000008f8, 0x00000010, 0xffffffff,
362 0x000008fc, 0x00000000, 0xffffffff,
363 0x000008f8, 0x00000011, 0xffffffff,
364 0x000008fc, 0x00000000, 0xffffffff,
365 0x000008f8, 0x00000012, 0xffffffff,
366 0x000008fc, 0x00000000, 0xffffffff,
367 0x000008f8, 0x00000013, 0xffffffff,
368 0x000008fc, 0x00000000, 0xffffffff,
369 0x000008f8, 0x00000014, 0xffffffff,
370 0x000008fc, 0x00000000, 0xffffffff,
371 0x000008f8, 0x00000015, 0xffffffff,
372 0x000008fc, 0x00000000, 0xffffffff,
373 0x000008f8, 0x00000016, 0xffffffff,
374 0x000008fc, 0x00000000, 0xffffffff,
375 0x000008f8, 0x00000017, 0xffffffff,
376 0x000008fc, 0x00000000, 0xffffffff,
377 0x000008f8, 0x00000018, 0xffffffff,
378 0x000008fc, 0x00000000, 0xffffffff,
379 0x000008f8, 0x00000019, 0xffffffff,
380 0x000008fc, 0x00000000, 0xffffffff,
381 0x000008f8, 0x0000001a, 0xffffffff,
382 0x000008fc, 0x00000000, 0xffffffff,
383 0x000008f8, 0x0000001b, 0xffffffff,
384 0x000008fc, 0x00000000, 0xffffffff,
385 0x000008f8, 0x00000020, 0xffffffff,
386 0x000008fc, 0xffffffff, 0xffffffff,
387 0x000008f8, 0x00000021, 0xffffffff,
388 0x000008fc, 0xffffffff, 0xffffffff,
389 0x000008f8, 0x00000022, 0xffffffff,
390 0x000008fc, 0xffffffff, 0xffffffff,
391 0x000008f8, 0x00000023, 0xffffffff,
392 0x000008fc, 0xffffffff, 0xffffffff,
393 0x000008f8, 0x00000024, 0xffffffff,
394 0x000008fc, 0xffffffff, 0xffffffff,
395 0x000008f8, 0x00000025, 0xffffffff,
396 0x000008fc, 0xffffffff, 0xffffffff,
397 0x000008f8, 0x00000026, 0xffffffff,
398 0x000008fc, 0xffffffff, 0xffffffff,
399 0x000008f8, 0x00000027, 0xffffffff,
400 0x000008fc, 0xffffffff, 0xffffffff,
401 0x000008f8, 0x00000028, 0xffffffff,
402 0x000008fc, 0xffffffff, 0xffffffff,
403 0x000008f8, 0x00000029, 0xffffffff,
404 0x000008fc, 0xffffffff, 0xffffffff,
405 0x000008f8, 0x0000002a, 0xffffffff,
406 0x000008fc, 0xffffffff, 0xffffffff,
407 0x000008f8, 0x0000002b, 0xffffffff,
408 0x000008fc, 0xffffffff, 0xffffffff
410 #define CAYMAN_CGCG_CGLS_ENABLE_LENGTH sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32))
412 static const u32 cayman_mgcg_default[] =
414 0x0000802c, 0xc0000000, 0xffffffff,
415 0x00003fc4, 0xc0000000, 0xffffffff,
416 0x00005448, 0x00000100, 0xffffffff,
417 0x000055e4, 0x00000100, 0xffffffff,
418 0x0000160c, 0x00000100, 0xffffffff,
419 0x00008984, 0x06000100, 0xffffffff,
420 0x0000c164, 0x00000100, 0xffffffff,
421 0x00008a18, 0x00000100, 0xffffffff,
422 0x0000897c, 0x06000100, 0xffffffff,
423 0x00008b28, 0x00000100, 0xffffffff,
424 0x00009144, 0x00800200, 0xffffffff,
425 0x00009a60, 0x00000100, 0xffffffff,
426 0x00009868, 0x00000100, 0xffffffff,
427 0x00008d58, 0x00000100, 0xffffffff,
428 0x00009510, 0x00000100, 0xffffffff,
429 0x0000949c, 0x00000100, 0xffffffff,
430 0x00009654, 0x00000100, 0xffffffff,
431 0x00009030, 0x00000100, 0xffffffff,
432 0x00009034, 0x00000100, 0xffffffff,
433 0x00009038, 0x00000100, 0xffffffff,
434 0x0000903c, 0x00000100, 0xffffffff,
435 0x00009040, 0x00000100, 0xffffffff,
436 0x0000a200, 0x00000100, 0xffffffff,
437 0x0000a204, 0x00000100, 0xffffffff,
438 0x0000a208, 0x00000100, 0xffffffff,
439 0x0000a20c, 0x00000100, 0xffffffff,
440 0x00009744, 0x00000100, 0xffffffff,
441 0x00003f80, 0x00000100, 0xffffffff,
442 0x0000a210, 0x00000100, 0xffffffff,
443 0x0000a214, 0x00000100, 0xffffffff,
444 0x000004d8, 0x00000100, 0xffffffff,
445 0x00009664, 0x00000100, 0xffffffff,
446 0x00009698, 0x00000100, 0xffffffff,
447 0x000004d4, 0x00000200, 0xffffffff,
448 0x000004d0, 0x00000000, 0xffffffff,
449 0x000030cc, 0x00000104, 0xffffffff,
450 0x0000d0c0, 0x00000100, 0xffffffff,
451 0x0000d8c0, 0x00000100, 0xffffffff,
452 0x0000802c, 0x40000000, 0xffffffff,
453 0x00003fc4, 0x40000000, 0xffffffff,
454 0x0000915c, 0x00010000, 0xffffffff,
455 0x00009160, 0x00030002, 0xffffffff,
456 0x00009164, 0x00050004, 0xffffffff,
457 0x00009168, 0x00070006, 0xffffffff,
458 0x00009178, 0x00070000, 0xffffffff,
459 0x0000917c, 0x00030002, 0xffffffff,
460 0x00009180, 0x00050004, 0xffffffff,
461 0x0000918c, 0x00010006, 0xffffffff,
462 0x00009190, 0x00090008, 0xffffffff,
463 0x00009194, 0x00070000, 0xffffffff,
464 0x00009198, 0x00030002, 0xffffffff,
465 0x0000919c, 0x00050004, 0xffffffff,
466 0x000091a8, 0x00010006, 0xffffffff,
467 0x000091ac, 0x00090008, 0xffffffff,
468 0x000091b0, 0x00070000, 0xffffffff,
469 0x000091b4, 0x00030002, 0xffffffff,
470 0x000091b8, 0x00050004, 0xffffffff,
471 0x000091c4, 0x00010006, 0xffffffff,
472 0x000091c8, 0x00090008, 0xffffffff,
473 0x000091cc, 0x00070000, 0xffffffff,
474 0x000091d0, 0x00030002, 0xffffffff,
475 0x000091d4, 0x00050004, 0xffffffff,
476 0x000091e0, 0x00010006, 0xffffffff,
477 0x000091e4, 0x00090008, 0xffffffff,
478 0x000091e8, 0x00000000, 0xffffffff,
479 0x000091ec, 0x00070000, 0xffffffff,
480 0x000091f0, 0x00030002, 0xffffffff,
481 0x000091f4, 0x00050004, 0xffffffff,
482 0x00009200, 0x00010006, 0xffffffff,
483 0x00009204, 0x00090008, 0xffffffff,
484 0x00009208, 0x00070000, 0xffffffff,
485 0x0000920c, 0x00030002, 0xffffffff,
486 0x00009210, 0x00050004, 0xffffffff,
487 0x0000921c, 0x00010006, 0xffffffff,
488 0x00009220, 0x00090008, 0xffffffff,
489 0x00009224, 0x00070000, 0xffffffff,
490 0x00009228, 0x00030002, 0xffffffff,
491 0x0000922c, 0x00050004, 0xffffffff,
492 0x00009238, 0x00010006, 0xffffffff,
493 0x0000923c, 0x00090008, 0xffffffff,
494 0x00009240, 0x00070000, 0xffffffff,
495 0x00009244, 0x00030002, 0xffffffff,
496 0x00009248, 0x00050004, 0xffffffff,
497 0x00009254, 0x00010006, 0xffffffff,
498 0x00009258, 0x00090008, 0xffffffff,
499 0x0000925c, 0x00070000, 0xffffffff,
500 0x00009260, 0x00030002, 0xffffffff,
501 0x00009264, 0x00050004, 0xffffffff,
502 0x00009270, 0x00010006, 0xffffffff,
503 0x00009274, 0x00090008, 0xffffffff,
504 0x00009278, 0x00070000, 0xffffffff,
505 0x0000927c, 0x00030002, 0xffffffff,
506 0x00009280, 0x00050004, 0xffffffff,
507 0x0000928c, 0x00010006, 0xffffffff,
508 0x00009290, 0x00090008, 0xffffffff,
509 0x000092a8, 0x00070000, 0xffffffff,
510 0x000092ac, 0x00030002, 0xffffffff,
511 0x000092b0, 0x00050004, 0xffffffff,
512 0x000092bc, 0x00010006, 0xffffffff,
513 0x000092c0, 0x00090008, 0xffffffff,
514 0x000092c4, 0x00070000, 0xffffffff,
515 0x000092c8, 0x00030002, 0xffffffff,
516 0x000092cc, 0x00050004, 0xffffffff,
517 0x000092d8, 0x00010006, 0xffffffff,
518 0x000092dc, 0x00090008, 0xffffffff,
519 0x00009294, 0x00000000, 0xffffffff,
520 0x0000802c, 0x40010000, 0xffffffff,
521 0x00003fc4, 0x40010000, 0xffffffff,
522 0x0000915c, 0x00010000, 0xffffffff,
523 0x00009160, 0x00030002, 0xffffffff,
524 0x00009164, 0x00050004, 0xffffffff,
525 0x00009168, 0x00070006, 0xffffffff,
526 0x00009178, 0x00070000, 0xffffffff,
527 0x0000917c, 0x00030002, 0xffffffff,
528 0x00009180, 0x00050004, 0xffffffff,
529 0x0000918c, 0x00010006, 0xffffffff,
530 0x00009190, 0x00090008, 0xffffffff,
531 0x00009194, 0x00070000, 0xffffffff,
532 0x00009198, 0x00030002, 0xffffffff,
533 0x0000919c, 0x00050004, 0xffffffff,
534 0x000091a8, 0x00010006, 0xffffffff,
535 0x000091ac, 0x00090008, 0xffffffff,
536 0x000091b0, 0x00070000, 0xffffffff,
537 0x000091b4, 0x00030002, 0xffffffff,
538 0x000091b8, 0x00050004, 0xffffffff,
539 0x000091c4, 0x00010006, 0xffffffff,
540 0x000091c8, 0x00090008, 0xffffffff,
541 0x000091cc, 0x00070000, 0xffffffff,
542 0x000091d0, 0x00030002, 0xffffffff,
543 0x000091d4, 0x00050004, 0xffffffff,
544 0x000091e0, 0x00010006, 0xffffffff,
545 0x000091e4, 0x00090008, 0xffffffff,
546 0x000091e8, 0x00000000, 0xffffffff,
547 0x000091ec, 0x00070000, 0xffffffff,
548 0x000091f0, 0x00030002, 0xffffffff,
549 0x000091f4, 0x00050004, 0xffffffff,
550 0x00009200, 0x00010006, 0xffffffff,
551 0x00009204, 0x00090008, 0xffffffff,
552 0x00009208, 0x00070000, 0xffffffff,
553 0x0000920c, 0x00030002, 0xffffffff,
554 0x00009210, 0x00050004, 0xffffffff,
555 0x0000921c, 0x00010006, 0xffffffff,
556 0x00009220, 0x00090008, 0xffffffff,
557 0x00009224, 0x00070000, 0xffffffff,
558 0x00009228, 0x00030002, 0xffffffff,
559 0x0000922c, 0x00050004, 0xffffffff,
560 0x00009238, 0x00010006, 0xffffffff,
561 0x0000923c, 0x00090008, 0xffffffff,
562 0x00009240, 0x00070000, 0xffffffff,
563 0x00009244, 0x00030002, 0xffffffff,
564 0x00009248, 0x00050004, 0xffffffff,
565 0x00009254, 0x00010006, 0xffffffff,
566 0x00009258, 0x00090008, 0xffffffff,
567 0x0000925c, 0x00070000, 0xffffffff,
568 0x00009260, 0x00030002, 0xffffffff,
569 0x00009264, 0x00050004, 0xffffffff,
570 0x00009270, 0x00010006, 0xffffffff,
571 0x00009274, 0x00090008, 0xffffffff,
572 0x00009278, 0x00070000, 0xffffffff,
573 0x0000927c, 0x00030002, 0xffffffff,
574 0x00009280, 0x00050004, 0xffffffff,
575 0x0000928c, 0x00010006, 0xffffffff,
576 0x00009290, 0x00090008, 0xffffffff,
577 0x000092a8, 0x00070000, 0xffffffff,
578 0x000092ac, 0x00030002, 0xffffffff,
579 0x000092b0, 0x00050004, 0xffffffff,
580 0x000092bc, 0x00010006, 0xffffffff,
581 0x000092c0, 0x00090008, 0xffffffff,
582 0x000092c4, 0x00070000, 0xffffffff,
583 0x000092c8, 0x00030002, 0xffffffff,
584 0x000092cc, 0x00050004, 0xffffffff,
585 0x000092d8, 0x00010006, 0xffffffff,
586 0x000092dc, 0x00090008, 0xffffffff,
587 0x00009294, 0x00000000, 0xffffffff,
588 0x0000802c, 0xc0000000, 0xffffffff,
589 0x00003fc4, 0xc0000000, 0xffffffff,
590 0x000008f8, 0x00000010, 0xffffffff,
591 0x000008fc, 0x00000000, 0xffffffff,
592 0x000008f8, 0x00000011, 0xffffffff,
593 0x000008fc, 0x00000000, 0xffffffff,
594 0x000008f8, 0x00000012, 0xffffffff,
595 0x000008fc, 0x00000000, 0xffffffff,
596 0x000008f8, 0x00000013, 0xffffffff,
597 0x000008fc, 0x00000000, 0xffffffff,
598 0x000008f8, 0x00000014, 0xffffffff,
599 0x000008fc, 0x00000000, 0xffffffff,
600 0x000008f8, 0x00000015, 0xffffffff,
601 0x000008fc, 0x00000000, 0xffffffff,
602 0x000008f8, 0x00000016, 0xffffffff,
603 0x000008fc, 0x00000000, 0xffffffff,
604 0x000008f8, 0x00000017, 0xffffffff,
605 0x000008fc, 0x00000000, 0xffffffff,
606 0x000008f8, 0x00000018, 0xffffffff,
607 0x000008fc, 0x00000000, 0xffffffff,
608 0x000008f8, 0x00000019, 0xffffffff,
609 0x000008fc, 0x00000000, 0xffffffff,
610 0x000008f8, 0x0000001a, 0xffffffff,
611 0x000008fc, 0x00000000, 0xffffffff,
612 0x000008f8, 0x0000001b, 0xffffffff,
613 0x000008fc, 0x00000000, 0xffffffff
615 #define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32))
617 static const u32 cayman_mgcg_disable[] =
619 0x0000802c, 0xc0000000, 0xffffffff,
620 0x000008f8, 0x00000000, 0xffffffff,
621 0x000008fc, 0xffffffff, 0xffffffff,
622 0x000008f8, 0x00000001, 0xffffffff,
623 0x000008fc, 0xffffffff, 0xffffffff,
624 0x000008f8, 0x00000002, 0xffffffff,
625 0x000008fc, 0xffffffff, 0xffffffff,
626 0x000008f8, 0x00000003, 0xffffffff,
627 0x000008fc, 0xffffffff, 0xffffffff,
628 0x00009150, 0x00600000, 0xffffffff
630 #define CAYMAN_MGCG_DISABLE_LENGTH sizeof(cayman_mgcg_disable) / (3 * sizeof(u32))
632 static const u32 cayman_mgcg_enable[] =
634 0x0000802c, 0xc0000000, 0xffffffff,
635 0x000008f8, 0x00000000, 0xffffffff,
636 0x000008fc, 0x00000000, 0xffffffff,
637 0x000008f8, 0x00000001, 0xffffffff,
638 0x000008fc, 0x00000000, 0xffffffff,
639 0x000008f8, 0x00000002, 0xffffffff,
640 0x000008fc, 0x00600000, 0xffffffff,
641 0x000008f8, 0x00000003, 0xffffffff,
642 0x000008fc, 0x00000000, 0xffffffff,
643 0x00009150, 0x96944200, 0xffffffff
646 #define CAYMAN_MGCG_ENABLE_LENGTH sizeof(cayman_mgcg_enable) / (3 * sizeof(u32))
648 #define NISLANDS_SYSLS_SEQUENCE 100
650 static const u32 cayman_sysls_default[] =
652 /* Register, Value, Mask bits */
653 0x000055e8, 0x00000000, 0xffffffff,
654 0x0000d0bc, 0x00000000, 0xffffffff,
655 0x0000d8bc, 0x00000000, 0xffffffff,
656 0x000015c0, 0x000c1401, 0xffffffff,
657 0x0000264c, 0x000c0400, 0xffffffff,
658 0x00002648, 0x000c0400, 0xffffffff,
659 0x00002650, 0x000c0400, 0xffffffff,
660 0x000020b8, 0x000c0400, 0xffffffff,
661 0x000020bc, 0x000c0400, 0xffffffff,
662 0x000020c0, 0x000c0c80, 0xffffffff,
663 0x0000f4a0, 0x000000c0, 0xffffffff,
664 0x0000f4a4, 0x00680fff, 0xffffffff,
665 0x00002f50, 0x00000404, 0xffffffff,
666 0x000004c8, 0x00000001, 0xffffffff,
667 0x000064ec, 0x00000000, 0xffffffff,
668 0x00000c7c, 0x00000000, 0xffffffff,
669 0x00008dfc, 0x00000000, 0xffffffff
671 #define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32))
673 static const u32 cayman_sysls_disable[] =
675 /* Register, Value, Mask bits */
676 0x0000d0c0, 0x00000000, 0xffffffff,
677 0x0000d8c0, 0x00000000, 0xffffffff,
678 0x000055e8, 0x00000000, 0xffffffff,
679 0x0000d0bc, 0x00000000, 0xffffffff,
680 0x0000d8bc, 0x00000000, 0xffffffff,
681 0x000015c0, 0x00041401, 0xffffffff,
682 0x0000264c, 0x00040400, 0xffffffff,
683 0x00002648, 0x00040400, 0xffffffff,
684 0x00002650, 0x00040400, 0xffffffff,
685 0x000020b8, 0x00040400, 0xffffffff,
686 0x000020bc, 0x00040400, 0xffffffff,
687 0x000020c0, 0x00040c80, 0xffffffff,
688 0x0000f4a0, 0x000000c0, 0xffffffff,
689 0x0000f4a4, 0x00680000, 0xffffffff,
690 0x00002f50, 0x00000404, 0xffffffff,
691 0x000004c8, 0x00000001, 0xffffffff,
692 0x000064ec, 0x00007ffd, 0xffffffff,
693 0x00000c7c, 0x0000ff00, 0xffffffff,
694 0x00008dfc, 0x0000007f, 0xffffffff
696 #define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32))
698 static const u32 cayman_sysls_enable[] =
700 /* Register, Value, Mask bits */
701 0x000055e8, 0x00000001, 0xffffffff,
702 0x0000d0bc, 0x00000100, 0xffffffff,
703 0x0000d8bc, 0x00000100, 0xffffffff,
704 0x000015c0, 0x000c1401, 0xffffffff,
705 0x0000264c, 0x000c0400, 0xffffffff,
706 0x00002648, 0x000c0400, 0xffffffff,
707 0x00002650, 0x000c0400, 0xffffffff,
708 0x000020b8, 0x000c0400, 0xffffffff,
709 0x000020bc, 0x000c0400, 0xffffffff,
710 0x000020c0, 0x000c0c80, 0xffffffff,
711 0x0000f4a0, 0x000000c0, 0xffffffff,
712 0x0000f4a4, 0x00680fff, 0xffffffff,
713 0x00002f50, 0x00000903, 0xffffffff,
714 0x000004c8, 0x00000000, 0xffffffff,
715 0x000064ec, 0x00000000, 0xffffffff,
716 0x00000c7c, 0x00000000, 0xffffffff,
717 0x00008dfc, 0x00000000, 0xffffffff
719 #define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32))
721 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
722 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
723 struct ni_power_info *ni_get_pi(struct radeon_device *rdev);
724 struct ni_ps *ni_get_ps(struct radeon_ps *rps);
725 void ni_dpm_reset_asic(struct radeon_device *rdev);
727 struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
729 struct ni_power_info *pi = rdev->pm.dpm.priv;
734 struct ni_ps *ni_get_ps(struct radeon_ps *rps)
736 struct ni_ps *ps = rps->ps_priv;
741 static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
746 s64 kt, kv, leakage_w, i_leakage, vddc, temperature;
748 i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
749 vddc = div64_s64(drm_int2fixp(v), 1000);
750 temperature = div64_s64(drm_int2fixp(t), 1000);
752 kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
753 drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
754 kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
755 drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));
757 leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);
759 *leakage = drm_fixp2int(leakage_w * 1000);
762 static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
763 const struct ni_leakage_coeffients *coeff,
769 ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
772 bool ni_dpm_vblank_too_short(struct radeon_device *rdev)
774 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
775 u32 vblank_time = r600_dpm_get_vblank_time(rdev);
776 /* we never hit the non-gddr5 limit so disable it */
777 u32 switch_limit = pi->mem_gddr5 ? 450 : 0;
779 if (vblank_time < switch_limit)
786 static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
787 struct radeon_ps *rps)
789 struct ni_ps *ps = ni_get_ps(rps);
790 struct radeon_clock_and_voltage_limits *max_limits;
791 bool disable_mclk_switching;
794 u32 max_sclk_vddc, max_mclk_vddci, max_mclk_vddc;
797 if ((rdev->pm.dpm.new_active_crtc_count > 1) ||
798 ni_dpm_vblank_too_short(rdev))
799 disable_mclk_switching = true;
801 disable_mclk_switching = false;
803 if (rdev->pm.dpm.ac_power)
804 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
806 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;
808 if (rdev->pm.dpm.ac_power == false) {
809 for (i = 0; i < ps->performance_level_count; i++) {
810 if (ps->performance_levels[i].mclk > max_limits->mclk)
811 ps->performance_levels[i].mclk = max_limits->mclk;
812 if (ps->performance_levels[i].sclk > max_limits->sclk)
813 ps->performance_levels[i].sclk = max_limits->sclk;
814 if (ps->performance_levels[i].vddc > max_limits->vddc)
815 ps->performance_levels[i].vddc = max_limits->vddc;
816 if (ps->performance_levels[i].vddci > max_limits->vddci)
817 ps->performance_levels[i].vddci = max_limits->vddci;
821 /* limit clocks to max supported clocks based on voltage dependency tables */
822 btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
824 btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
826 btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
829 for (i = 0; i < ps->performance_level_count; i++) {
831 if (ps->performance_levels[i].sclk > max_sclk_vddc)
832 ps->performance_levels[i].sclk = max_sclk_vddc;
834 if (max_mclk_vddci) {
835 if (ps->performance_levels[i].mclk > max_mclk_vddci)
836 ps->performance_levels[i].mclk = max_mclk_vddci;
839 if (ps->performance_levels[i].mclk > max_mclk_vddc)
840 ps->performance_levels[i].mclk = max_mclk_vddc;
844 /* XXX validate the min clocks required for display */
846 /* adjust low state */
847 if (disable_mclk_switching) {
848 ps->performance_levels[0].mclk =
849 ps->performance_levels[ps->performance_level_count - 1].mclk;
850 ps->performance_levels[0].vddci =
851 ps->performance_levels[ps->performance_level_count - 1].vddci;
854 btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
855 &ps->performance_levels[0].sclk,
856 &ps->performance_levels[0].mclk);
858 for (i = 1; i < ps->performance_level_count; i++) {
859 if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
860 ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
861 if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
862 ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
865 /* adjust remaining states */
866 if (disable_mclk_switching) {
867 mclk = ps->performance_levels[0].mclk;
868 vddci = ps->performance_levels[0].vddci;
869 for (i = 1; i < ps->performance_level_count; i++) {
870 if (mclk < ps->performance_levels[i].mclk)
871 mclk = ps->performance_levels[i].mclk;
872 if (vddci < ps->performance_levels[i].vddci)
873 vddci = ps->performance_levels[i].vddci;
875 for (i = 0; i < ps->performance_level_count; i++) {
876 ps->performance_levels[i].mclk = mclk;
877 ps->performance_levels[i].vddci = vddci;
880 for (i = 1; i < ps->performance_level_count; i++) {
881 if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
882 ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
883 if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
884 ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
888 for (i = 1; i < ps->performance_level_count; i++)
889 btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
890 &ps->performance_levels[i].sclk,
891 &ps->performance_levels[i].mclk);
893 for (i = 0; i < ps->performance_level_count; i++)
894 btc_adjust_clock_combinations(rdev, max_limits,
895 &ps->performance_levels[i]);
897 for (i = 0; i < ps->performance_level_count; i++) {
898 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
899 ps->performance_levels[i].sclk,
900 max_limits->vddc, &ps->performance_levels[i].vddc);
901 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
902 ps->performance_levels[i].mclk,
903 max_limits->vddci, &ps->performance_levels[i].vddci);
904 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
905 ps->performance_levels[i].mclk,
906 max_limits->vddc, &ps->performance_levels[i].vddc);
907 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
908 rdev->clock.current_dispclk,
909 max_limits->vddc, &ps->performance_levels[i].vddc);
912 for (i = 0; i < ps->performance_level_count; i++) {
913 btc_apply_voltage_delta_rules(rdev,
914 max_limits->vddc, max_limits->vddci,
915 &ps->performance_levels[i].vddc,
916 &ps->performance_levels[i].vddci);
919 ps->dc_compatible = true;
920 for (i = 0; i < ps->performance_level_count; i++) {
921 if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
922 ps->dc_compatible = false;
924 if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
925 ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
929 static void ni_cg_clockgating_default(struct radeon_device *rdev)
932 const u32 *ps = NULL;
934 ps = (const u32 *)&cayman_cgcg_cgls_default;
935 count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
937 btc_program_mgcg_hw_sequence(rdev, ps, count);
940 static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
944 const u32 *ps = NULL;
947 ps = (const u32 *)&cayman_cgcg_cgls_enable;
948 count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
950 ps = (const u32 *)&cayman_cgcg_cgls_disable;
951 count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
954 btc_program_mgcg_hw_sequence(rdev, ps, count);
957 static void ni_mg_clockgating_default(struct radeon_device *rdev)
960 const u32 *ps = NULL;
962 ps = (const u32 *)&cayman_mgcg_default;
963 count = CAYMAN_MGCG_DEFAULT_LENGTH;
965 btc_program_mgcg_hw_sequence(rdev, ps, count);
968 static void ni_mg_clockgating_enable(struct radeon_device *rdev,
972 const u32 *ps = NULL;
975 ps = (const u32 *)&cayman_mgcg_enable;
976 count = CAYMAN_MGCG_ENABLE_LENGTH;
978 ps = (const u32 *)&cayman_mgcg_disable;
979 count = CAYMAN_MGCG_DISABLE_LENGTH;
982 btc_program_mgcg_hw_sequence(rdev, ps, count);
985 static void ni_ls_clockgating_default(struct radeon_device *rdev)
988 const u32 *ps = NULL;
990 ps = (const u32 *)&cayman_sysls_default;
991 count = CAYMAN_SYSLS_DEFAULT_LENGTH;
993 btc_program_mgcg_hw_sequence(rdev, ps, count);
996 static void ni_ls_clockgating_enable(struct radeon_device *rdev,
1000 const u32 *ps = NULL;
1003 ps = (const u32 *)&cayman_sysls_enable;
1004 count = CAYMAN_SYSLS_ENABLE_LENGTH;
1006 ps = (const u32 *)&cayman_sysls_disable;
1007 count = CAYMAN_SYSLS_DISABLE_LENGTH;
1010 btc_program_mgcg_hw_sequence(rdev, ps, count);
1014 static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
1015 struct radeon_clock_voltage_dependency_table *table)
1017 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1021 for (i = 0; i < table->count; i++) {
1022 if (0xff01 == table->entries[i].v) {
1023 if (pi->max_vddc == 0)
1025 table->entries[i].v = pi->max_vddc;
1032 static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
1036 ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1037 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
1039 ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1040 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
1044 static void ni_stop_dpm(struct radeon_device *rdev)
1046 WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
1050 static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
1054 return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
1061 static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
1062 PPSMC_Msg msg, u32 parameter)
1064 WREG32(SMC_SCRATCH0, parameter);
1065 return rv770_send_msg_to_smc(rdev, msg);
1068 static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1070 if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1073 return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
1077 int ni_dpm_force_performance_level(struct radeon_device *rdev,
1078 enum radeon_dpm_forced_level level)
1080 if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1081 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1084 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK)
1086 } else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1087 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1090 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK)
1092 } else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) {
1093 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1096 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1100 rdev->pm.dpm.forced_level = level;
1105 static void ni_stop_smc(struct radeon_device *rdev)
1110 for (i = 0; i < rdev->usec_timeout; i++) {
1111 tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
1119 r7xx_stop_smc(rdev);
/*
 * ni_process_firmware_header() - read the SMC firmware header out of SMC
 * SRAM and cache the offsets of the firmware-managed tables (state table,
 * soft registers, MC register table, fan table, arb/auto-refresh table,
 * CAC table, SPLL table) in the per-device power-info structures.  Each
 * offset is a dword in the header but only the low 16 bits are kept.
 */
1122 static int ni_process_firmware_header(struct radeon_device *rdev)
1124 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1125 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1126 struct ni_power_info *ni_pi = ni_get_pi(rdev);
/* state table offset */
1130 ret = rv770_read_smc_sram_dword(rdev,
1131 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1132 NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
1133 &tmp, pi->sram_end);
1138 pi->state_table_start = (u16)tmp;
/* soft-register block offset */
1140 ret = rv770_read_smc_sram_dword(rdev,
1141 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1142 NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
1143 &tmp, pi->sram_end);
1148 pi->soft_regs_start = (u16)tmp;
/* MC register table offset */
1150 ret = rv770_read_smc_sram_dword(rdev,
1151 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1152 NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
1153 &tmp, pi->sram_end);
1158 eg_pi->mc_reg_table_start = (u16)tmp;
/* fan table offset */
1160 ret = rv770_read_smc_sram_dword(rdev,
1161 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1162 NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
1163 &tmp, pi->sram_end);
1168 ni_pi->fan_table_start = (u16)tmp;
/* MC arbiter / DRAM auto-refresh table offset */
1170 ret = rv770_read_smc_sram_dword(rdev,
1171 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1172 NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
1173 &tmp, pi->sram_end);
1178 ni_pi->arb_table_start = (u16)tmp;
/* CAC (leakage) table offset */
1180 ret = rv770_read_smc_sram_dword(rdev,
1181 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1182 NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
1183 &tmp, pi->sram_end);
1188 ni_pi->cac_table_start = (u16)tmp;
/* SPLL divider table offset */
1190 ret = rv770_read_smc_sram_dword(rdev,
1191 NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1192 NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
1193 &tmp, pi->sram_end);
1198 ni_pi->spll_table_start = (u16)tmp;
/*
 * ni_read_clock_registers() - snapshot the current SPLL/MPLL clock and
 * memory power-management register values into ni_pi->clock_registers.
 * These cached values are used later as the baseline when building SMC
 * state tables, so modifications start from the VBIOS-programmed state.
 */
1204 static void ni_read_clock_registers(struct radeon_device *rdev)
1206 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1208 ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
1209 ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
1210 ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
1211 ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
1212 ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
1213 ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
1214 ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
1215 ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
1216 ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
1217 ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
1218 ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
1219 ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
1220 ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
1221 ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
/*
 * ni_enter_ulp_state() - request the SMC switch to minimum-power (ULP)
 * mode.  If gfx clock gating is enabled, briefly force the gfx clock on
 * (with a posting read of GB_ADDR_CONFIG) so the SMC message can be
 * delivered, then write the SwitchToMinimumPower message into SMC_MSG.
 */
1225 static int ni_enter_ulp_state(struct radeon_device *rdev)
1227 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1229 if (pi->gfx_clock_gating) {
1230 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1231 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1232 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
/* posting read to flush the writes above */
1233 RREG32(GB_ADDR_CONFIG);
1236 WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1237 ~HOST_SMC_MSG_MASK);
/*
 * ni_program_response_times() - convert the voltage/backbias response
 * times, ACPI delay and VBI timeout from time units into reference-clock
 * ticks (the /1600 scaling) and program them into the SMC soft-register
 * block, along with a fixed MC block delay and the mclk switch limit.
 */
1245 static void ni_program_response_times(struct radeon_device *rdev)
1247 u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
1248 u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
1249 u32 reference_clock;
1251 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
1253 voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1254 backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
/* fall back to sane defaults when the platform supplies none */
1256 if (voltage_response_time == 0)
1257 voltage_response_time = 1000;
1259 if (backbias_response_time == 0)
1260 backbias_response_time = 1000;
1262 acpi_delay_time = 15000;
1263 vbi_time_out = 100000;
1265 reference_clock = radeon_get_xclk(rdev);
/* scale all delays into reference-clock units */
1267 vddc_dly = (voltage_response_time * reference_clock) / 1600;
1268 bb_dly = (backbias_response_time * reference_clock) / 1600;
1269 acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1270 vbi_dly = (vbi_time_out * reference_clock) / 1600;
1272 mclk_switch_limit = (460 * reference_clock) / 100;
1274 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg, vddc_dly);
1275 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1276 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi, acpi_dly);
1277 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1278 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
1279 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
/*
 * ni_populate_smc_voltage_table() - copy one atom voltage table into the
 * SMC state table's SMIO arrays: highSMIO entries are zeroed and each
 * entry's smio_low pattern is OR'd (big-endian) into lowSMIO.
 */
1282 static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
1283 struct atom_voltage_table *voltage_table,
1284 NISLANDS_SMC_STATETABLE *table)
1288 for (i = 0; i < voltage_table->count; i++) {
1289 table->highSMIO[i] = 0;
1290 table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
/*
 * ni_populate_smc_voltage_tables() - fill the SMC state table's voltage
 * SMIO entries and mask table from the VDDC and (if present) VDDCI atom
 * voltage tables.  Also records the index of the first VDDC entry at or
 * above pi->max_vddc_in_table as maxVDDCIndexInPPTable.
 */
1294 static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
1295 NISLANDS_SMC_STATETABLE *table)
1297 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1298 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1301 if (eg_pi->vddc_voltage_table.count) {
1302 ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
1303 table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
1304 table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
1305 cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
/* find the first entry covering the max VDDC used by the pp table */
1307 for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1308 if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
1309 table->maxVDDCIndexInPPTable = i;
1315 if (eg_pi->vddci_voltage_table.count) {
1316 ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
1318 table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
1319 table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1320 cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
/*
 * ni_populate_voltage_value() - fill an SMC voltage entry from the first
 * atom-table entry whose value is >= the requested value (tables are
 * ordered ascending).  Fails if no entry is large enough.
 */
1324 static int ni_populate_voltage_value(struct radeon_device *rdev,
1325 struct atom_voltage_table *table,
1327 NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1331 for (i = 0; i < table->count; i++) {
1332 if (value <= table->entries[i].value) {
1333 voltage->index = (u8)i;
1334 voltage->value = cpu_to_be16(table->entries[i].value);
/* no table entry can satisfy the request */
1339 if (i >= table->count)
/*
 * ni_populate_mvdd_value() - pick the MVDD (memory voltage) level for a
 * given memory clock.  Without MVDD control, always report the high
 * level; otherwise select low/high around pi->mvdd_split_frequency.
 */
1345 static void ni_populate_mvdd_value(struct radeon_device *rdev,
1347 NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1349 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1350 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1352 if (!pi->mvdd_control) {
1353 voltage->index = eg_pi->mvdd_high_index;
1354 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1358 if (mclk <= pi->mvdd_split_frequency) {
1359 voltage->index = eg_pi->mvdd_low_index;
1360 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1362 voltage->index = eg_pi->mvdd_high_index;
1363 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
/*
 * ni_get_std_voltage_value() - return the standardized voltage for an
 * SMC voltage entry: use the CAC leakage table's vddc for the entry's
 * index when available, otherwise fall back to the raw (big-endian
 * stored) voltage value.
 */
1367 static int ni_get_std_voltage_value(struct radeon_device *rdev,
1368 NISLANDS_SMC_VOLTAGE_VALUE *voltage,
1371 if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
1372 ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
1373 *std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
1375 *std_voltage = be16_to_cpu(voltage->value);
/*
 * ni_populate_std_voltage_value() - store an index/value pair into an
 * SMC voltage entry (value converted to big-endian for the SMC).
 */
1380 static void ni_populate_std_voltage_value(struct radeon_device *rdev,
1381 u16 value, u8 index,
1382 NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1384 voltage->index = index;
1385 voltage->value = cpu_to_be16(value);
/*
 * ni_get_smc_power_scaling_factor() - derive the SMC power scaling
 * factor as the CAC TID count (from CG_CAC_CTRL) multiplied by the xclk
 * period, where the period is (1e9 / xclk) / 1e4.
 */
1388 static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
1391 u32 xclk = radeon_get_xclk(rdev);
1392 u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;
1394 xclk_period = (1000000000UL / xclk);
1395 xclk_period /= 10000UL;
1397 return tmp * xclk_period;
/*
 * ni_scale_power_for_smc() - convert a power value in watts into SMC
 * units: multiply by the scaling factor and shift left by 2 (x4).
 */
1400 static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
1402 return (power_in_watts * scaling_factor) << 2;
/*
 * ni_calculate_power_boost_limit() - compute the DPM2 power-boost limit
 * when power containment and the boost limit are enabled.  Requires at
 * least 3 performance levels; scales the near-TDP limit by the squared
 * ratio of the medium to the high standardized VDDC, with a 90/100
 * margin: limit = near_tdp * (Vmed^2 * 90) / (Vhigh^2 * 100).
 * Returns 0 when the feature is disabled or preconditions fail.
 */
1405 static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
1406 struct radeon_ps *radeon_state,
1409 struct ni_ps *state = ni_get_ps(radeon_state);
1410 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1411 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1412 u32 power_boost_limit = 0;
1415 if (ni_pi->enable_power_containment &&
1416 ni_pi->use_power_boost_limit) {
1417 NISLANDS_SMC_VOLTAGE_VALUE vddc;
/* boost only makes sense with a medium and a high level present */
1422 if (state->performance_level_count < 3)
/* standardized VDDC of the second-highest level */
1425 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1426 state->performance_levels[state->performance_level_count - 2].vddc,
1431 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
/* standardized VDDC of the highest level */
1435 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1436 state->performance_levels[state->performance_level_count - 1].vddc,
1441 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
/* 64-bit math to avoid overflow of voltage^2 products */
1445 n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
1446 d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
1447 tmp = div64_u64(n, d);
1451 power_boost_limit = (u32)tmp;
1454 return power_boost_limit;
/*
 * ni_calculate_adjusted_tdp_limits() - apply the user TDP adjustment
 * (percent) to the board's TDP limit and shift the near-TDP limit by
 * the same absolute amount.  adjust_polarity selects raising vs.
 * lowering the limits; the adjustment is rejected when it exceeds
 * rdev->pm.dpm.tdp_od_limit.
 */
1457 static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
1458 bool adjust_polarity,
1461 u32 *near_tdp_limit)
1463 if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
1466 if (adjust_polarity) {
1467 *tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1468 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
1470 *tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1471 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
/*
 * ni_populate_smc_tdp_limits() - when power containment is enabled,
 * compute the adjusted TDP / near-TDP / safe / boost power limits,
 * scale them into SMC units and upload the four consecutive dpm2Params
 * dwords (starting at TDPLimit) into the SMC state table in SRAM.
 */
1477 static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
1478 struct radeon_ps *radeon_state)
1480 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1481 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1483 if (ni_pi->enable_power_containment) {
1484 NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
1485 u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
1488 u32 power_boost_limit;
/* a zero scaling factor would make all limits zero */
1491 if (scaling_factor == 0)
1494 memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1496 ret = ni_calculate_adjusted_tdp_limits(rdev,
1498 rdev->pm.dpm.tdp_adjustment,
1504 power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
1507 smc_table->dpm2Params.TDPLimit =
1508 cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
1509 smc_table->dpm2Params.NearTDPLimit =
1510 cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
1511 smc_table->dpm2Params.SafePowerLimit =
1512 cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
1514 smc_table->dpm2Params.PowerBoostLimit =
1515 cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));
/* copy the 4 limit dwords (TDP/near/safe/boost) in one SMC write */
1517 ret = rv770_copy_bytes_to_smc(rdev,
1518 (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
1519 offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
1520 (u8 *)(&smc_table->dpm2Params.TDPLimit),
1521 sizeof(u32) * 4, pi->sram_end);
/*
 * ni_copy_and_switch_arb_sets() - copy the MC arbiter DRAM timing
 * registers and the per-state burst time from the source arb set
 * (F0..F3) into the destination set, then request the MC to switch to
 * the destination set via MC_ARB_CG.
 */
1529 int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
1530 u32 arb_freq_src, u32 arb_freq_dest)
1532 u32 mc_arb_dram_timing;
1533 u32 mc_arb_dram_timing2;
/* read timing + burst time from the source set */
1537 switch (arb_freq_src) {
1538 case MC_CG_ARB_FREQ_F0:
1539 mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1540 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1541 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
1543 case MC_CG_ARB_FREQ_F1:
1544 mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING_1);
1545 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
1546 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
1548 case MC_CG_ARB_FREQ_F2:
1549 mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING_2);
1550 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
1551 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
1553 case MC_CG_ARB_FREQ_F3:
1554 mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING_3);
1555 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
1556 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
/* write them into the destination set */
1562 switch (arb_freq_dest) {
1563 case MC_CG_ARB_FREQ_F0:
1564 WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
1565 WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
1566 WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
1568 case MC_CG_ARB_FREQ_F1:
1569 WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
1570 WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
1571 WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
1573 case MC_CG_ARB_FREQ_F2:
1574 WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
1575 WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
1576 WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
1578 case MC_CG_ARB_FREQ_F3:
1579 WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
1580 WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
1581 WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
/* enable CG on all arb sets and request the switch */
1587 mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
1588 WREG32(MC_CG_CONFIG, mc_cg_config);
1589 WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);
/*
 * ni_init_arb_table_index() - read the first dword of the SMC arb table
 * and set its top byte (byte 3) to MC_CG_ARB_FREQ_F1 so the firmware
 * starts from arb set F1, then write the dword back.
 */
1594 static int ni_init_arb_table_index(struct radeon_device *rdev)
1596 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1597 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1601 ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1602 &tmp, pi->sram_end);
/* arb set index lives in the top byte of the dword */
1607 tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
1609 return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
/*
 * ni_initial_switch_from_arb_f0_to_f1() - convenience wrapper: copy arb
 * set F0 into F1 and switch the MC to F1.
 */
1613 static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
1615 return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
/*
 * ni_force_switch_to_arb_f0() - read the current arb set index from the
 * top byte of the SMC arb table and, unless already F0, copy that set's
 * timings into F0 and switch the MC back to F0.
 */
1618 static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
1620 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1621 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1625 ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1626 &tmp, pi->sram_end);
/* current arb set index is stored in byte 3 */
1630 tmp = (tmp >> 24) & 0xff;
1632 if (tmp == MC_CG_ARB_FREQ_F0)
1635 return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
/*
 * ni_populate_memory_timing_parameters() - build one SMC MC-arb register
 * set for a performance level: compute the refresh rate from sclk, let
 * the atom tables program engine/DRAM timings, then capture the
 * resulting MC_ARB_DRAM_TIMING/TIMING2 values (big-endian for the SMC).
 */
1638 static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
1639 struct rv7xx_pl *pl,
1640 SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
1645 arb_regs->mc_arb_rfsh_rate =
1646 (u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);
/* let the VBIOS tables program the timing registers for this level */
1649 radeon_atom_set_engine_dram_timings(rdev,
1653 dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1654 dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1656 arb_regs->mc_arb_dram_timing = cpu_to_be32(dram_timing);
1657 arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
/*
 * ni_do_program_memory_timing_parameters() - for each performance level
 * of a state, build its MC-arb register set and upload it into the SMC
 * arb table data array, starting at entry first_arb_set.
 */
1662 static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
1663 struct radeon_ps *radeon_state,
1664 unsigned int first_arb_set)
1666 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1667 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1668 struct ni_ps *state = ni_get_ps(radeon_state);
1669 SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
1672 for (i = 0; i < state->performance_level_count; i++) {
1673 ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
/* one SMC write per level, indexed into the arb data array */
1677 ret = rv770_copy_bytes_to_smc(rdev,
1678 (u16)(ni_pi->arb_table_start +
1679 offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
1680 sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
1682 (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
/*
 * ni_program_memory_timing_parameters() - program the memory timings for
 * a new state into the driver-state region of the SMC arb table.
 */
1690 static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
1691 struct radeon_ps *radeon_new_state)
1693 return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
1694 NISLANDS_DRIVER_STATE_ARB_INDEX);
/*
 * ni_populate_initial_mvdd_value() - the initial (boot) state always
 * uses the high MVDD level.
 */
1697 static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
1698 struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1700 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1702 voltage->index = eg_pi->mvdd_high_index;
1703 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
/*
 * ni_populate_smc_initial_state() - fill the SMC state table's
 * initialState (single level) from the boot state: cached MPLL/SPLL
 * register values, boot clocks, voltages, PCIe gen2 flag, GDDR5
 * strobe/EDC flags, and neutral DPM2 parameters.  Values are stored
 * big-endian as the SMC expects.
 */
1706 static int ni_populate_smc_initial_state(struct radeon_device *rdev,
1707 struct radeon_ps *radeon_initial_state,
1708 NISLANDS_SMC_STATETABLE *table)
1710 struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
1711 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1712 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1713 struct ni_power_info *ni_pi = ni_get_pi(rdev);
/* memory clock (MPLL) registers from the cached VBIOS values */
1717 table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
1718 cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
1719 table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
1720 cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
1721 table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
1722 cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
1723 table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
1724 cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
1725 table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
1726 cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
1727 table->initialState.levels[0].mclk.vDLL_CNTL =
1728 cpu_to_be32(ni_pi->clock_registers.dll_cntl);
1729 table->initialState.levels[0].mclk.vMPLL_SS =
1730 cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
1731 table->initialState.levels[0].mclk.vMPLL_SS2 =
1732 cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
1733 table->initialState.levels[0].mclk.mclk_value =
1734 cpu_to_be32(initial_state->performance_levels[0].mclk);
/* engine clock (SPLL) registers from the cached VBIOS values */
1736 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1737 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
1738 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1739 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
1740 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1741 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
1742 table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
1743 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
1744 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1745 cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
1746 table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1747 cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
1748 table->initialState.levels[0].sclk.sclk_value =
1749 cpu_to_be32(initial_state->performance_levels[0].sclk);
1750 table->initialState.levels[0].arbRefreshState =
1751 NISLANDS_INITIAL_STATE_ARB_INDEX;
1753 table->initialState.levels[0].ACIndex = 0;
/* voltages: boot VDDC + its standardized value, VDDCI, MVDD */
1755 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1756 initial_state->performance_levels[0].vddc,
1757 &table->initialState.levels[0].vddc);
1761 ret = ni_get_std_voltage_value(rdev,
1762 &table->initialState.levels[0].vddc,
1765 ni_populate_std_voltage_value(rdev, std_vddc,
1766 table->initialState.levels[0].vddc.index,
1767 &table->initialState.levels[0].std_vddc);
1770 if (eg_pi->vddci_control)
1771 ni_populate_voltage_value(rdev,
1772 &eg_pi->vddci_voltage_table,
1773 initial_state->performance_levels[0].vddci,
1774 &table->initialState.levels[0].vddci);
1776 ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);
1778 reg = CG_R(0xffff) | CG_L(0);
1779 table->initialState.levels[0].aT = cpu_to_be32(reg);
1781 table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1783 if (pi->boot_in_gen2)
1784 table->initialState.levels[0].gen2PCIE = 1;
1786 table->initialState.levels[0].gen2PCIE = 0;
/* GDDR5: strobe mode, and EDC read/write above the enable threshold */
1788 if (pi->mem_gddr5) {
1789 table->initialState.levels[0].strobeMode =
1790 cypress_get_strobe_mode_settings(rdev,
1791 initial_state->performance_levels[0].mclk);
1793 if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
1794 table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
1796 table->initialState.levels[0].mcFlags = 0;
1799 table->initialState.levelCount = 1;
1801 table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
/* neutral DPM2 parameters: no power-state adjustment for boot state */
1803 table->initialState.levels[0].dpm2.MaxPS = 0;
1804 table->initialState.levels[0].dpm2.NearTDPDec = 0;
1805 table->initialState.levels[0].dpm2.AboveSafeInc = 0;
1806 table->initialState.levels[0].dpm2.BelowSafeInc = 0;
1808 reg = MIN_POWER_MASK | MAX_POWER_MASK;
1809 table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1811 reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1812 table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
/*
 * ni_populate_smc_acpi_state() - build the ACPI (lowest-power) state
 * from a copy of initialState: select the ACPI VDDC (or the minimum
 * table VDDC as fallback), power down the MPLLs, bypass the memory
 * DLLs, route sclk to the bypass mux, and zero the mclk/sclk values so
 * the SMC treats the clocks as off.
 */
1817 static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
1818 NISLANDS_SMC_STATETABLE *table)
1820 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1821 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1822 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1823 u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
1824 u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
1825 u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
1826 u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
1827 u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
1828 u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
1829 u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
1830 u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
1831 u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
1832 u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
/* start from the initial state and drop the DC flag */
1836 table->ACPIState = table->initialState;
1838 table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
1840 if (pi->acpi_vddc) {
1841 ret = ni_populate_voltage_value(rdev,
1842 &eg_pi->vddc_voltage_table,
1843 pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
1847 ret = ni_get_std_voltage_value(rdev,
1848 &table->ACPIState.levels[0].vddc, &std_vddc);
1850 ni_populate_std_voltage_value(rdev, std_vddc,
1851 table->ACPIState.levels[0].vddc.index,
1852 &table->ACPIState.levels[0].std_vddc);
1855 if (pi->pcie_gen2) {
1856 if (pi->acpi_pcie_gen2)
1857 table->ACPIState.levels[0].gen2PCIE = 1;
1859 table->ACPIState.levels[0].gen2PCIE = 0;
1861 table->ACPIState.levels[0].gen2PCIE = 0;
/* no ACPI VDDC defined: fall back to the minimum table voltage */
1864 ret = ni_populate_voltage_value(rdev,
1865 &eg_pi->vddc_voltage_table,
1866 pi->min_vddc_in_table,
1867 &table->ACPIState.levels[0].vddc);
1871 ret = ni_get_std_voltage_value(rdev,
1872 &table->ACPIState.levels[0].vddc,
1875 ni_populate_std_voltage_value(rdev, std_vddc,
1876 table->ACPIState.levels[0].vddc.index,
1877 &table->ACPIState.levels[0].std_vddc);
1879 table->ACPIState.levels[0].gen2PCIE = 0;
1882 if (eg_pi->acpi_vddci) {
1883 if (eg_pi->vddci_control)
1884 ni_populate_voltage_value(rdev,
1885 &eg_pi->vddci_voltage_table,
1887 &table->ACPIState.levels[0].vddci);
/* power down the AD and DQ memory PLLs */
1891 mpll_ad_func_cntl &= ~PDNB;
1893 mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
1896 mpll_dq_func_cntl &= ~PDNB;
1897 mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
/* hold the memory DLLs in reset and power them down */
1900 mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
1909 mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
1918 dll_cntl |= (MRDCKA0_BYPASS |
/* sclk: select the bypass mux input */
1927 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
1928 spll_func_cntl_2 |= SCLK_MUX_SEL(4);
1930 table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
1931 table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
1932 table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
1933 table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
1934 table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
1935 table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);
/* zero clock values signal "clocks off" to the SMC */
1937 table->ACPIState.levels[0].mclk.mclk_value = 0;
1939 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
1940 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
1941 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
1942 table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);
1944 table->ACPIState.levels[0].sclk.sclk_value = 0;
1946 ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1948 if (eg_pi->dynamic_ac_timing)
1949 table->ACPIState.levels[0].ACIndex = 1;
/* neutral DPM2 parameters, as in the initial state */
1951 table->ACPIState.levels[0].dpm2.MaxPS = 0;
1952 table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
1953 table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
1954 table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;
1956 reg = MIN_POWER_MASK | MAX_POWER_MASK;
1957 table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1959 reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1960 table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
/*
 * ni_init_smc_table() - build the complete SMC state table: voltage
 * tables, thermal-protect type, platform capability flags, the initial
 * (boot) state and the ACPI state (driver/ULV states start as copies of
 * the initial state), program the boot state's memory timings, then
 * upload the whole table into SMC SRAM.
 */
1965 static int ni_init_smc_table(struct radeon_device *rdev)
1967 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1968 struct ni_power_info *ni_pi = ni_get_pi(rdev);
1970 struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
1971 NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;
1973 memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1975 ni_populate_smc_voltage_tables(rdev, table);
1977 switch (rdev->pm.int_thermal_type) {
1978 case THERMAL_TYPE_NI:
1979 case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
1980 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1982 case THERMAL_TYPE_NONE:
1983 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1986 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
/* platform capability flags from the power-play table */
1990 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
1991 table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1993 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
1994 table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
1996 if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1997 table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
2000 table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
2002 ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
2006 ret = ni_populate_smc_acpi_state(rdev, table);
/* driver and ULV states start out identical to the initial state */
2010 table->driverState = table->initialState;
2012 table->ULVState = table->initialState;
2014 ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
2015 NISLANDS_INITIAL_STATE_ARB_INDEX);
2019 return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
2020 sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
/*
 * ni_calculate_sclk_params() - compute the SPLL register values for a
 * given engine clock: dividers come from the atom tables, the fractional
 * feedback divider is derived from the target clock, and spread spectrum
 * is applied when the VBIOS provides engine SS info.  Results are left
 * in CPU byte order (callers convert for the SMC).
 */
2023 static int ni_calculate_sclk_params(struct radeon_device *rdev,
2025 NISLANDS_SMC_SCLK_VALUE *sclk)
2027 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2028 struct ni_power_info *ni_pi = ni_get_pi(rdev);
2029 struct atom_clock_dividers dividers;
2030 u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
2031 u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
2032 u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
2033 u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
2034 u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
2035 u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
2037 u32 reference_clock = rdev->clock.spll.reference_freq;
2038 u32 reference_divider;
/* NOTE(review): "÷rs" below looks like an HTML-entity encoding
 * artifact of "&dividers" — verify against the pristine source. */
2042 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2043 engine_clock, false, ÷rs);
2047 reference_divider = 1 + dividers.ref_div;
/* NOTE(review): 16834 looks like a typo for 16384 (2^14 fixed-point
 * fb-div scale) — confirm against the rv770/evergreen equivalents. */
2050 tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834;
2051 do_div(tmp, reference_clock);
2054 spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
2055 spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
2056 spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
2058 spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
2059 spll_func_cntl_2 |= SCLK_MUX_SEL(2);
2061 spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
2062 spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
2063 spll_func_cntl_3 |= SPLL_DITHEN;
/* apply engine spread spectrum when the VBIOS defines it */
2066 struct radeon_atom_ss ss;
2067 u32 vco_freq = engine_clock * dividers.post_div;
2069 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2070 ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
2071 u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
2072 u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
2074 cg_spll_spread_spectrum &= ~CLK_S_MASK;
2075 cg_spll_spread_spectrum |= CLK_S(clk_s);
2076 cg_spll_spread_spectrum |= SSEN;
2078 cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
2079 cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
2083 sclk->sclk_value = engine_clock;
2084 sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
2085 sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
2086 sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
2087 sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
2088 sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
2089 sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
/*
 * ni_populate_sclk_value() - compute the SPLL parameters for an engine
 * clock and store them into the SMC structure in big-endian order.
 */
2094 static int ni_populate_sclk_value(struct radeon_device *rdev,
2096 NISLANDS_SMC_SCLK_VALUE *sclk)
2098 NISLANDS_SMC_SCLK_VALUE sclk_tmp;
2101 ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
/* endian-swap every field for the SMC */
2103 sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
2104 sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
2105 sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
2106 sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
2107 sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
2108 sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
2109 sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
/*
 * ni_init_smc_spll_table() - precompute a 256-entry SPLL divider table
 * (p_div/fb_div packed into freq[], clk_s/clk_v into ss[]) and upload it
 * to the SMC at spll_table_start.  Each field is range-checked against
 * its mask before packing.
 */
2115 static int ni_init_smc_spll_table(struct radeon_device *rdev)
2117 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2118 struct ni_power_info *ni_pi = ni_get_pi(rdev);
2119 SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
2120 NISLANDS_SMC_SCLK_VALUE sclk_params;
/* nothing to do if the firmware header gave no SPLL table offset */
2129 if (ni_pi->spll_table_start == 0)
2132 spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
2133 if (spll_table == NULL)
2136 for (i = 0; i < 256; i++) {
2137 ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
/* extract the divider fields from the computed register values */
2141 p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
2142 fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
2143 clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
2144 clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
2146 fb_div &= ~0x00001FFF;
2150 if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
2153 if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
/* NOTE(review): this clk_s check duplicates the one above —
 * probably fb_div was meant here; verify against upstream. */
2156 if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2159 if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
/* pack p_div+fb_div into freq[] and clk_s+clk_v into ss[] */
2165 tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
2166 ((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
2167 spll_table->freq[i] = cpu_to_be32(tmp);
2169 tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
2170 ((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
2171 spll_table->ss[i] = cpu_to_be32(tmp);
2177 ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
2178 sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
/*
 * ni_populate_mclk_value() - fill an SMC memory-clock entry for one level.
 *
 * Computes MPLL divider settings for @memory_clock via the AtomBIOS
 * COMPUTE_MEMORY_PLL_PARAM service, mirrors them into the AD and (for GDDR5)
 * DQ MPLL control registers, optionally programs memory spread spectrum, and
 * stores everything big-endian into @mclk for upload to the SMC.
 * NOTE: several lines of this function (opening brace, some parameters,
 * error checks, else branches, return) are elided from this view.
 */
2185 static int ni_populate_mclk_value(struct radeon_device *rdev,
2188 NISLANDS_SMC_MCLK_VALUE *mclk,
2192 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2193 struct ni_power_info *ni_pi = ni_get_pi(rdev);
/* Start from the clock-register values captured at init time. */
2194 u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
2195 u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
2196 u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
2197 u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
2198 u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
2199 u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
2200 u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
2201 u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
2202 struct atom_clock_dividers dividers;
/* Ask the AtomBIOS tables for MPLL dividers matching the target mclk. */
2208 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
2209 memory_clock, strobe_mode, &dividers);
2214 mc_seq_misc7 = RREG32(MC_SEQ_MISC7);
/* MC_SEQ_MISC7 bit 27 set forces the post divider to 1. */
2216 if (mc_seq_misc7 & 0x8000000)
2217 dividers.post_div = 1;
2220 ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);
/* Program the address/command (AD) PLL divider fields. */
2222 mpll_ad_func_cntl &= ~(CLKR_MASK |
2223 YCLK_POST_DIV_MASK |
2227 mpll_ad_func_cntl |= CLKR(dividers.ref_div);
2228 mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2229 mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
2230 mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2231 mpll_ad_func_cntl |= IBIAS(ibias);
2233 if (dividers.vco_mode)
2234 mpll_ad_func_cntl_2 |= VCO_MODE;
2236 mpll_ad_func_cntl_2 &= ~VCO_MODE;
/* GDDR5 also has a separate data (DQ) PLL that mirrors the AD settings. */
2238 if (pi->mem_gddr5) {
2239 mpll_dq_func_cntl &= ~(CLKR_MASK |
2240 YCLK_POST_DIV_MASK |
2244 mpll_dq_func_cntl |= CLKR(dividers.ref_div);
2245 mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2246 mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
2247 mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2248 mpll_dq_func_cntl |= IBIAS(ibias);
2251 mpll_dq_func_cntl &= ~PDNB;
2253 mpll_dq_func_cntl |= PDNB;
2255 if (dividers.vco_mode)
2256 mpll_dq_func_cntl_2 |= VCO_MODE;
2258 mpll_dq_func_cntl_2 &= ~VCO_MODE;
/* Optionally apply memory spread spectrum if the VBIOS provides SS info. */
2262 struct radeon_atom_ss ss;
2263 u32 vco_freq = memory_clock * dividers.post_div;
2265 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2266 ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
2267 u32 reference_clock = rdev->clock.mpll.reference_freq;
2268 u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
/* clk_s/clk_v: spread-spectrum step and delta derived from SS rate/%. */
2269 u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
2270 u32 clk_v = ss.percentage *
2271 (0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);
2273 mpll_ss1 &= ~CLKV_MASK;
2274 mpll_ss1 |= CLKV(clk_v);
2276 mpll_ss2 &= ~CLKS_MASK;
2277 mpll_ss2 |= CLKS(clk_s);
2281 dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
2284 mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
2285 mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
/* DLL power-down bits for the memory channels (condition elided here). */
2287 mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
2296 mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
/* Store all values big-endian, as the SMC firmware expects. */
2306 mclk->mclk_value = cpu_to_be32(memory_clock);
2307 mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
2308 mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
2309 mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
2310 mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
2311 mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
2312 mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
2313 mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
2314 mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);
/*
 * ni_populate_smc_sp() - set the switch-period (bSP) field of each SMC level.
 *
 * All levels except the highest get pi->dsp; the highest-performance level
 * gets pi->psp. Values are stored big-endian for the SMC firmware.
 */
2319 static void ni_populate_smc_sp(struct radeon_device *rdev,
2320 struct radeon_ps *radeon_state,
2321 NISLANDS_SMC_SWSTATE *smc_state)
2323 struct ni_ps *ps = ni_get_ps(radeon_state);
2324 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2327 for (i = 0; i < ps->performance_level_count - 1; i++)
2328 smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
2330 smc_state->levels[ps->performance_level_count - 1].bSP =
2331 cpu_to_be32(pi->psp);
/*
 * ni_convert_power_level_to_smc() - translate one driver performance level
 * (@pl) into the SMC hardware-level structure (@level).
 *
 * Fills in PCIe gen2 capability, sclk/mclk PLL programming, memory
 * controller flags (stutter, EDC, strobe, RTT), and VDDC/VDDCI/MVDD
 * voltage entries. Several error checks, else branches and the final
 * return are elided from this view.
 */
2334 static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
2335 struct rv7xx_pl *pl,
2336 NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
2338 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2339 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2340 struct ni_power_info *ni_pi = ni_get_pi(rdev);
2344 u32 tmp = RREG32(DC_STUTTER_CNTL);
/* Advertise PCIe gen2 only if both the board and the state allow it. */
2346 level->gen2PCIE = pi->pcie_gen2 ?
2347 ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
2349 ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
/* Memory self-refresh (stutter) only below the threshold, with UVD idle
 * and both display-controller stutter enables set. */
2354 if (pi->mclk_stutter_mode_threshold &&
2355 (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
2356 !eg_pi->uvd_enabled &&
2357 (tmp & DC_STUTTER_ENABLE_A) &&
2358 (tmp & DC_STUTTER_ENABLE_B))
2359 level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;
2361 if (pi->mem_gddr5) {
/* GDDR5 error-detection read/write flags above their thresholds. */
2362 if (pl->mclk > pi->mclk_edc_enable_threshold)
2363 level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
2364 if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
2365 level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;
2367 level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);
/* Pick the DLL state from MC_SEQ_MISC5/6 depending on the mclk ratio. */
2369 if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
2370 if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
2371 ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
2372 dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
2374 dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
2376 dll_state_on = false;
2377 if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
2378 level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
2381 ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
2383 (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
/* Non-GDDR5 path: fixed strobe/dll arguments. */
2386 ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1);
2391 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
2392 pl->vddc, &level->vddc);
2396 ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
2400 ni_populate_std_voltage_value(rdev, std_vddc,
2401 level->vddc.index, &level->std_vddc);
2403 if (eg_pi->vddci_control) {
2404 ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
2405 pl->vddci, &level->vddci);
2410 ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
/*
 * ni_populate_smc_t() - compute the aT (arbitration timing) value for each
 * SMC level pair from the sclk ratio between adjacent performance levels.
 *
 * Rejects states with 9+ levels; a single-level state gets a fixed
 * CG_R(0xffff) entry. For multi-level states, r600_calculate_at() derives
 * per-pair high/low thresholds (UVD states use a different time base),
 * which are then scaled by bsp/pi->bsp into CG_R/CG_L fields.
 * Some error-return and brace lines are elided from this view.
 */
2415 static int ni_populate_smc_t(struct radeon_device *rdev,
2416 struct radeon_ps *radeon_state,
2417 NISLANDS_SMC_SWSTATE *smc_state)
2419 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2420 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2421 struct ni_ps *state = ni_get_ps(radeon_state);
2427 if (state->performance_level_count >= 9)
2430 if (state->performance_level_count < 2) {
2431 a_t = CG_R(0xffff) | CG_L(0);
2432 smc_state->levels[0].aT = cpu_to_be32(a_t);
2436 smc_state->levels[0].aT = cpu_to_be32(0);
2438 for (i = 0; i <= state->performance_level_count - 2; i++) {
2439 if (eg_pi->uvd_enabled)
/* UVD states use a stretched time base (2 or 8 ms steps). */
2440 ret = r600_calculate_at(
2441 1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
2443 state->performance_levels[i + 1].sclk,
2444 state->performance_levels[i].sclk,
2448 ret = r600_calculate_at(
2451 state->performance_levels[i + 1].sclk,
2452 state->performance_levels[i].sclk,
2457 t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
2458 t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
/* Patch the low threshold into this level's CG_R field... */
2461 a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
2462 a_t |= CG_R(t_l * pi->bsp / 20000);
2463 smc_state->levels[i].aT = cpu_to_be32(a_t);
/* ...and seed the next level with the high threshold in CG_L. */
2465 high_bsp = (i == state->performance_level_count - 2) ?
2468 a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
2469 smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
/*
 * ni_populate_power_containment_values() - program DPM2 power-containment
 * parameters (MaxPS pulse skipping, near-TDP step sizes, power boost) into
 * each SMC level of @smc_state.
 *
 * No-op when power containment is disabled. Writes the scaled power-boost
 * limit into SMC SRAM, then derives each level's maximum pulse-skip
 * percentage from the sclk delta to the previous level. Error-return and
 * brace lines are elided from this view.
 */
2475 static int ni_populate_power_containment_values(struct radeon_device *rdev,
2476 struct radeon_ps *radeon_state,
2477 NISLANDS_SMC_SWSTATE *smc_state)
2479 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2480 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2481 struct ni_power_info *ni_pi = ni_get_pi(rdev);
2482 struct ni_ps *state = ni_get_ps(radeon_state);
2489 u32 power_boost_limit;
2492 if (ni_pi->enable_power_containment == false)
2495 if (state->performance_level_count == 0)
/* SMC and driver level counts must agree before indexing levels[]. */
2498 if (smc_state->levelCount != state->performance_level_count)
2501 ret = ni_calculate_adjusted_tdp_limits(rdev,
2503 rdev->pm.dpm.tdp_adjustment,
2509 power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);
/* Upload the scaled boost limit into the DPM2 parameter block in SRAM. */
2511 ret = rv770_write_smc_sram_dword(rdev,
2512 pi->state_table_start +
2513 offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
2514 offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
2515 ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
2518 power_boost_limit = 0;
/* Level 0 never skips pulses or boosts on its own step values. */
2520 smc_state->levels[0].dpm2.MaxPS = 0;
2521 smc_state->levels[0].dpm2.NearTDPDec = 0;
2522 smc_state->levels[0].dpm2.AboveSafeInc = 0;
2523 smc_state->levels[0].dpm2.BelowSafeInc = 0;
2524 smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;
2526 for (i = 1; i < state->performance_level_count; i++) {
2527 prev_sclk = state->performance_levels[i-1].sclk;
2528 max_sclk = state->performance_levels[i].sclk;
/* Highest level uses the _H percentage, intermediate levels use _M. */
2529 max_ps_percent = (i != (state->performance_level_count - 1)) ?
2530 NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;
2532 if (max_sclk < prev_sclk)
2535 if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
2536 min_sclk = max_sclk;
2538 min_sclk = prev_sclk;
2540 min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;
2542 if (min_sclk < state->performance_levels[0].sclk)
2543 min_sclk = state->performance_levels[0].sclk;
/* MaxPS = fraction of pulses that may be skipped at this level. */
2548 smc_state->levels[i].dpm2.MaxPS =
2549 (u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
2550 smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
2551 smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
2552 smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
2553 smc_state->levels[i].stateFlags |=
2554 ((i != (state->performance_level_count - 1)) && power_boost_limit) ?
2555 PPSMC_STATEFLAG_POWERBOOST : 0;
/*
 * ni_populate_sq_ramping_values() - fill per-level SQ power-throttle
 * registers in @smc_state.
 *
 * SQ ramping is disabled entirely if any of the configured DPM2 ramp
 * constants would overflow its register field. Levels at or above the
 * ramping threshold get the configured min/max power and ramp shape;
 * below it, the mask values are written instead (the else branch line is
 * elided from this view). Values are stored big-endian.
 */
2561 static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
2562 struct radeon_ps *radeon_state,
2563 NISLANDS_SMC_SWSTATE *smc_state)
2565 struct ni_power_info *ni_pi = ni_get_pi(rdev);
2566 struct ni_ps *state = ni_get_ps(radeon_state);
2567 u32 sq_power_throttle;
2568 u32 sq_power_throttle2;
2569 bool enable_sq_ramping = ni_pi->enable_sq_ramping;
2572 if (state->performance_level_count == 0)
2575 if (smc_state->levelCount != state->performance_level_count)
2578 if (rdev->pm.dpm.sq_ramping_threshold == 0)
/* Sanity: each compile-time ramp constant must fit its register field. */
2581 if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
2582 enable_sq_ramping = false;
2584 if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
2585 enable_sq_ramping = false;
2587 if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
2588 enable_sq_ramping = false;
2590 if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
2591 enable_sq_ramping = false;
2593 if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO > (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
2594 enable_sq_ramping = false;
2596 for (i = 0; i < state->performance_level_count; i++) {
2597 sq_power_throttle = 0;
2598 sq_power_throttle2 = 0;
2600 if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
2601 enable_sq_ramping) {
2602 sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
2603 sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
2604 sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
2605 sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
2606 sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
/* Ramping off: saturate all fields (full masks) so no throttling occurs. */
2608 sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
2609 sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
2612 smc_state->levels[i].SQPowerThrottle = cpu_to_be32(sq_power_throttle);
2613 smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
/*
 * ni_enable_power_containment() - toggle SMC TDP clamping.
 *
 * When enabling: skipped for UVD states; sends PPSMC_TDPClampingActive and
 * records the result in ni_pi->pc_enabled. When disabling: sends
 * PPSMC_TDPClampingInactive and clears pc_enabled. The enable/disable
 * selector parameter and error-return lines are elided from this view.
 */
2619 static int ni_enable_power_containment(struct radeon_device *rdev,
2620 struct radeon_ps *radeon_new_state,
2623 struct ni_power_info *ni_pi = ni_get_pi(rdev);
2624 PPSMC_Result smc_result;
2627 if (ni_pi->enable_power_containment) {
/* Clamping is not applied while a UVD (video decode) state is active. */
2629 if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
2630 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
2631 if (smc_result != PPSMC_Result_OK) {
2633 ni_pi->pc_enabled = false;
2635 ni_pi->pc_enabled = true;
2639 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
2640 if (smc_result != PPSMC_Result_OK)
2642 ni_pi->pc_enabled = false;
/*
 * ni_convert_power_state_to_smc() - build a full SMC software state from a
 * driver power state.
 *
 * Converts each performance level via ni_convert_power_level_to_smc(),
 * assigns arbitration/AC-timing indices and display watermarks, writes the
 * watermark threshold soft register, then layers in switch periods, power
 * containment, and SQ ramping values. On containment/ramping failure the
 * corresponding feature is disabled rather than failing the conversion.
 * Some error-check and brace lines are elided from this view.
 */
2649 static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
2650 struct radeon_ps *radeon_state,
2651 NISLANDS_SMC_SWSTATE *smc_state)
2653 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2654 struct ni_power_info *ni_pi = ni_get_pi(rdev);
2655 struct ni_ps *state = ni_get_ps(radeon_state);
/* Threshold = top-level sclk (the * 100 / 100 is a no-op scale factor). */
2657 u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100;
2659 if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
2660 smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
2662 smc_state->levelCount = 0;
2664 if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
2667 for (i = 0; i < state->performance_level_count; i++) {
2668 ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
2669 &smc_state->levels[i]);
2670 smc_state->levels[i].arbRefreshState =
2671 (u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);
/* Watermark selection: sclk-relative with containment, index-based otherwise. */
2676 if (ni_pi->enable_power_containment)
2677 smc_state->levels[i].displayWatermark =
2678 (state->performance_levels[i].sclk < threshold) ?
2679 PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2681 smc_state->levels[i].displayWatermark = (i < 2) ?
2682 PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2684 if (eg_pi->dynamic_ac_timing)
2685 smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
2687 smc_state->levels[i].ACIndex = 0;
2689 smc_state->levelCount++;
2692 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
2693 cpu_to_be32(threshold / 512));
2695 ni_populate_smc_sp(rdev, radeon_state, smc_state);
/* Failures here degrade gracefully: disable the feature, keep converting. */
2697 ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
2699 ni_pi->enable_power_containment = false;
2701 ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
2703 ni_pi->enable_sq_ramping = false;
2705 return ni_populate_smc_t(rdev, radeon_state, smc_state);
/*
 * ni_upload_sw_state() - convert @radeon_new_state to SMC format and copy
 * it into the driverState slot of the SMC state table in SRAM.
 *
 * Allocates a zeroed, maximum-size SWSTATE buffer (header plus all possible
 * per-level entries). The kfree/error-path lines are elided from this view.
 */
2708 static int ni_upload_sw_state(struct radeon_device *rdev,
2709 struct radeon_ps *radeon_new_state)
2711 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2712 u16 address = pi->state_table_start +
2713 offsetof(NISLANDS_SMC_STATETABLE, driverState);
/* SWSTATE already contains one level; add space for the remaining ones. */
2714 u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
2715 ((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
2717 NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
2719 if (smc_state == NULL)
2722 ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
2726 ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
/*
 * ni_set_mc_special_registers() - append derived MC register entries to the
 * register table.
 *
 * For MC_SEQ_MISC1, synthesizes EMRS and MRS entries (and, via the
 * MC_SEQ_RESERVE_M case, MRS1) whose values combine the live MC_PMG_CMD_*
 * register contents with per-entry data from the source register. @j tracks
 * the append position starting at table->last; each case bounds-checks @j
 * against SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE. Case labels, j++ increments
 * and the default/return lines are elided from this view.
 */
2734 static int ni_set_mc_special_registers(struct radeon_device *rdev,
2735 struct ni_mc_reg_table *table)
2737 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2741 for (i = 0, j = table->last; i < table->last; i++) {
2742 switch (table->mc_reg_address[i].s1) {
2743 case MC_SEQ_MISC1 >> 2:
2744 if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
/* Derived EMRS entry: live high half + MISC1's high half shifted down. */
2746 temp_reg = RREG32(MC_PMG_CMD_EMRS);
2747 table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
2748 table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2749 for (k = 0; k < table->num_entries; k++)
2750 table->mc_reg_table_entry[k].mc_data[j] =
2751 ((temp_reg & 0xffff0000)) |
2752 ((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
2754 if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
/* Derived MRS entry: live high half + MISC1's low half. */
2757 temp_reg = RREG32(MC_PMG_CMD_MRS);
2758 table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
2759 table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2760 for(k = 0; k < table->num_entries; k++) {
2761 table->mc_reg_table_entry[k].mc_data[j] =
2762 (temp_reg & 0xffff0000) |
2763 (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
/* Extra bit set on non-GDDR5 boards (condition line elided here). */
2765 table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
2768 if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2771 case MC_SEQ_RESERVE_M >> 2:
2772 temp_reg = RREG32(MC_PMG_CMD_MRS1);
2773 table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
2774 table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2775 for (k = 0; k < table->num_entries; k++)
2776 table->mc_reg_table_entry[k].mc_data[j] =
2777 (temp_reg & 0xffff0000) |
2778 (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2780 if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
/*
 * ni_check_s0_mc_reg_index() - map an MC sequencer register (dword offset)
 * to its shadow "_LP" counterpart.
 *
 * Stores the mapped offset in *out_reg and, per the visible structure,
 * returns whether a mapping exists (return/break/default lines are elided
 * from this view).
 */
2793 static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
2798 case MC_SEQ_RAS_TIMING >> 2:
2799 *out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
2801 case MC_SEQ_CAS_TIMING >> 2:
2802 *out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
2804 case MC_SEQ_MISC_TIMING >> 2:
2805 *out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
2807 case MC_SEQ_MISC_TIMING2 >> 2:
2808 *out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
2810 case MC_SEQ_RD_CTL_D0 >> 2:
2811 *out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
2813 case MC_SEQ_RD_CTL_D1 >> 2:
2814 *out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
2816 case MC_SEQ_WR_CTL_D0 >> 2:
2817 *out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
2819 case MC_SEQ_WR_CTL_D1 >> 2:
2820 *out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
2822 case MC_PMG_CMD_EMRS >> 2:
2823 *out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2825 case MC_PMG_CMD_MRS >> 2:
2826 *out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2828 case MC_PMG_CMD_MRS1 >> 2:
2829 *out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2831 case MC_SEQ_PMG_TIMING >> 2:
2832 *out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
2834 case MC_PMG_CMD_MRS2 >> 2:
2835 *out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
/*
 * ni_set_valid_flag() - mark which MC register columns actually vary.
 *
 * Sets bit i of table->valid_flag when register column i differs between
 * any two consecutive table entries; constant columns stay unmarked so
 * they need not be uploaded per state. (A break after the match and the
 * closing braces are elided from this view.)
 */
2845 static void ni_set_valid_flag(struct ni_mc_reg_table *table)
2849 for (i = 0; i < table->last; i++) {
2850 for (j = 1; j < table->num_entries; j++) {
2851 if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
2852 table->valid_flag |= 1 << i;
/*
 * ni_set_s0_mc_reg_index() - fill each entry's s0 (shadow) register offset.
 *
 * Uses the _LP mapping from ni_check_s0_mc_reg_index() when one exists,
 * otherwise falls back to the original s1 offset.
 */
2859 static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
2864 for (i = 0; i < table->last; i++)
2865 table->mc_reg_address[i].s0 =
2866 ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
2867 address : table->mc_reg_address[i].s1;
/*
 * ni_copy_vbios_mc_reg_table() - copy the AtomBIOS MC register table into
 * the driver's ni_mc_reg_table representation.
 *
 * Rejects tables exceeding SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE registers or
 * MAX_AC_TIMING_ENTRIES entries, then copies addresses, per-entry mclk_max
 * and mc_data values verbatim. (Error-return and return lines are elided
 * from this view.)
 */
2870 static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
2871 struct ni_mc_reg_table *ni_table)
2875 if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2877 if (table->num_entries > MAX_AC_TIMING_ENTRIES)
2880 for (i = 0; i < table->last; i++)
2881 ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
2882 ni_table->last = table->last;
2884 for (i = 0; i < table->num_entries; i++) {
2885 ni_table->mc_reg_table_entry[i].mclk_max =
2886 table->mc_reg_table_entry[i].mclk_max;
2887 for (j = 0; j < table->last; j++)
2888 ni_table->mc_reg_table_entry[i].mc_data[j] =
2889 table->mc_reg_table_entry[i].mc_data[j];
2891 ni_table->num_entries = table->num_entries;
/*
 * ni_initialize_mc_reg_table() - build the driver's MC register table at
 * init time.
 *
 * Seeds the _LP shadow registers from their live counterparts, reads the
 * VBIOS MC register table for the installed memory module, copies it into
 * ni_pi->mc_reg_table, fills shadow indices, appends the special-register
 * entries, and finally marks which columns vary. (kfree of the temporary
 * table and the error/return lines are elided from this view.)
 */
2896 static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
2898 struct ni_power_info *ni_pi = ni_get_pi(rdev);
2900 struct atom_mc_reg_table *table;
2901 struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
2902 u8 module_index = rv770_get_memory_module_index(rdev);
2904 table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
/* Initialize each shadow (_LP) register with the current live value. */
2908 WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
2909 WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
2910 WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
2911 WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
2912 WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
2913 WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
2914 WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
2915 WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
2916 WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
2917 WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
2918 WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
2919 WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
2920 WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
2922 ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
2927 ret = ni_copy_vbios_mc_reg_table(table, ni_table);
2932 ni_set_s0_mc_reg_index(ni_table);
2934 ret = ni_set_mc_special_registers(rdev, ni_table);
2939 ni_set_valid_flag(ni_table);
/*
 * ni_populate_mc_reg_addresses() - copy the address pairs of all valid
 * (varying) MC registers into the SMC register table, big-endian.
 *
 * Compacts the table: only columns flagged in valid_flag are emitted, up to
 * SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE. (The break on overflow and the i++
 * increment line are elided from this view.)
 */
2947 static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
2948 SMC_NIslands_MCRegisters *mc_reg_table)
2950 struct ni_power_info *ni_pi = ni_get_pi(rdev);
2953 for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
2954 if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
2955 if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2957 mc_reg_table->address[i].s0 =
2958 cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
2959 mc_reg_table->address[i].s1 =
2960 cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
2964 mc_reg_table->last = (u8)i;
/*
 * ni_convert_mc_registers() - copy one MC register entry's data values into
 * an SMC register set, big-endian, compacting to the valid columns only.
 *
 * Column j of @entry goes to slot i of @data when bit j of @valid_flag is
 * set. (The i++ increment line is elided from this view.)
 */
2968 static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
2969 SMC_NIslands_MCRegisterSet *data,
2970 u32 num_entries, u32 valid_flag)
2974 for (i = 0, j = 0; j < num_entries; j++) {
2975 if (valid_flag & (1 << j)) {
2976 data->value[i] = cpu_to_be32(entry->mc_data[j]);
/*
 * ni_convert_mc_reg_table_entry_to_smc() - pick the MC register entry
 * matching a performance level's mclk and convert it to SMC format.
 *
 * Scans for the first entry whose mclk_max covers pl->mclk; if none does,
 * falls back to the last entry (the i-- adjustment line is elided from
 * this view).
 */
2982 static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
2983 struct rv7xx_pl *pl,
2984 SMC_NIslands_MCRegisterSet *mc_reg_table_data)
2986 struct ni_power_info *ni_pi = ni_get_pi(rdev);
2989 for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
2990 if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
2994 if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
2997 ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
2999 ni_pi->mc_reg_table.last,
3000 ni_pi->mc_reg_table.valid_flag);
/*
 * ni_convert_mc_reg_table_to_smc() - convert MC register data for every
 * performance level of @radeon_state into the driver-state slots of the
 * SMC register table.
 */
3003 static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
3004 struct radeon_ps *radeon_state,
3005 SMC_NIslands_MCRegisters *mc_reg_table)
3007 struct ni_ps *state = ni_get_ps(radeon_state);
3010 for (i = 0; i < state->performance_level_count; i++) {
3011 ni_convert_mc_reg_table_entry_to_smc(rdev,
3012 &state->performance_levels[i],
3013 &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
/*
 * ni_populate_mc_reg_table() - build and upload the complete SMC MC
 * register table at boot.
 *
 * Slot 0 holds the boot state's registers, slot 1 the first raw table
 * entry, and the driver-state slots the converted boot state; the whole
 * structure is then copied into SMC SRAM at mc_reg_table_start.
 */
3017 static int ni_populate_mc_reg_table(struct radeon_device *rdev,
3018 struct radeon_ps *radeon_boot_state)
3020 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3021 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3022 struct ni_power_info *ni_pi = ni_get_pi(rdev);
3023 struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
3024 SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3026 memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3028 rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
3030 ni_populate_mc_reg_addresses(rdev, mc_reg_table);
/* Slot 0: registers for the boot performance level. */
3032 ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
3033 &mc_reg_table->data[0]);
/* Slot 1: the first raw table entry (lowest mclk range). */
3035 ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
3036 &mc_reg_table->data[1],
3037 ni_pi->mc_reg_table.last,
3038 ni_pi->mc_reg_table.valid_flag);
3040 ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
3042 return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
3044 sizeof(SMC_NIslands_MCRegisters),
/*
 * ni_upload_mc_reg_table() - upload only the driver-state portion of the MC
 * register table for a new power state.
 *
 * Rebuilds the table for @radeon_new_state, then copies just the
 * driver-state slots (one SMC_NIslands_MCRegisterSet per performance
 * level) to their offset within the SMC copy.
 */
3048 static int ni_upload_mc_reg_table(struct radeon_device *rdev,
3049 struct radeon_ps *radeon_new_state)
3051 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3052 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3053 struct ni_power_info *ni_pi = ni_get_pi(rdev);
3054 struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
3055 SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3058 memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3060 ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
/* Address of the first driver-state slot inside the SMC-side table. */
3062 address = eg_pi->mc_reg_table_start +
3063 (u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
3065 return rv770_copy_bytes_to_smc(rdev, address,
3066 (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
3067 sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
/*
 * ni_init_driver_calculated_leakage_table() - fill the CAC leakage lookup
 * table by computing leakage per (temperature, voltage) point.
 *
 * Temperature entries step in 8 degC increments (stored in millidegrees),
 * clamped to the configured minimum. Each leakage value is scaled for the
 * SMC and stored big-endian; voltage columns beyond the VDDC table are
 * padded with the maximum observed leakage. (The leakage output argument
 * and return line are elided from this view.)
 */
3071 static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
3072 PP_NIslands_CACTABLES *cac_tables)
3074 struct ni_power_info *ni_pi = ni_get_pi(rdev);
3075 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3077 unsigned int i, j, table_size;
3079 u32 smc_leakage, max_leakage = 0;
3082 table_size = eg_pi->vddc_voltage_table.count;
3084 if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3085 table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3087 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3089 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
3090 for (j = 0; j < table_size; j++) {
/* Temperature in millidegrees: 8 degC per LUT row. */
3091 t = (1000 * ((i + 1) * 8));
3093 if (t < ni_pi->cac_data.leakage_minimum_temperature)
3094 t = ni_pi->cac_data.leakage_minimum_temperature;
3096 ni_calculate_leakage_for_v_and_t(rdev,
3097 &ni_pi->cac_data.leakage_coefficients,
3098 eg_pi->vddc_voltage_table.entries[j].value,
3100 ni_pi->cac_data.i_leakage,
3103 smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
3104 if (smc_leakage > max_leakage)
3105 max_leakage = smc_leakage;
3107 cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
/* Pad unused voltage columns with the worst-case leakage. */
3111 for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3112 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3113 cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
/*
 * ni_init_simplified_leakage_table() - fill the CAC leakage lookup table
 * directly from the platform's cac_leakage_table (no temperature model).
 *
 * Each voltage column gets the same scaled leakage value at every
 * temperature row; columns beyond the table are padded with the maximum
 * observed leakage. table_size is clamped to the smaller of the VDDC and
 * leakage tables and the LUT width. (Error-return and return lines are
 * elided from this view.)
 */
3118 static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
3119 PP_NIslands_CACTABLES *cac_tables)
3121 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3122 struct radeon_cac_leakage_table *leakage_table =
3123 &rdev->pm.dpm.dyn_state.cac_leakage_table;
3124 u32 i, j, table_size;
3125 u32 smc_leakage, max_leakage = 0;
3131 table_size = leakage_table->count;
3133 if (eg_pi->vddc_voltage_table.count != table_size)
3134 table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
3135 eg_pi->vddc_voltage_table.count : leakage_table->count;
3137 if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3138 table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3140 if (table_size == 0)
3143 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3145 for (j = 0; j < table_size; j++) {
3146 smc_leakage = leakage_table->entries[j].leakage;
3148 if (smc_leakage > max_leakage)
3149 max_leakage = smc_leakage;
/* Same leakage for every temperature row of this voltage column. */
3151 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3152 cac_tables->cac_lkge_lut[i][j] =
3153 cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
/* Pad unused voltage columns with the worst-case leakage. */
3156 for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3157 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3158 cac_tables->cac_lkge_lut[i][j] =
3159 cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
/*
 * ni_initialize_smc_cac_tables() - build and upload the SMC CAC (power
 * estimation) tables.
 *
 * Programs the CAC TID fields, copies the DC-CAC and PCIe weight tables,
 * fills cac_data from the selected weight set, generates the leakage LUT
 * (driver-calculated or simplified), and uploads the whole structure to
 * SMC SRAM. On failure, CAC and power containment are both disabled
 * rather than failing init. (kfree of cac_tables and return lines are
 * elided from this view.)
 */
3164 static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
3166 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3167 struct ni_power_info *ni_pi = ni_get_pi(rdev);
3168 PP_NIslands_CACTABLES *cac_tables = NULL;
3172 if (ni_pi->enable_cac == false)
3175 cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
/* TID count/unit from the board-specific CAC weight set. */
3179 reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
3180 reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
3181 TID_UNIT(ni_pi->cac_weights->tid_unit));
3182 WREG32(CG_CAC_CTRL, reg);
3184 for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
3185 ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];
3187 for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
3188 cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];
3190 ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
3191 ni_pi->cac_data.pwr_const = 0;
3192 ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
3193 ni_pi->cac_data.bif_cac_value = 0;
3194 ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
3195 ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
3196 ni_pi->cac_data.allow_ovrflw = 0;
3197 ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
3198 ni_pi->cac_data.num_win_tdp = 0;
3199 ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;
/* Choose the leakage LUT generation strategy for this board. */
3201 if (ni_pi->driver_calculate_cac_leakage)
3202 ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
3204 ret = ni_init_simplified_leakage_table(rdev, cac_tables);
3209 cac_tables->pwr_const = cpu_to_be32(ni_pi->cac_data.pwr_const);
3210 cac_tables->dc_cacValue = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
3211 cac_tables->bif_cacValue = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
3212 cac_tables->AllowOvrflw = ni_pi->cac_data.allow_ovrflw;
3213 cac_tables->MCWrWeight = ni_pi->cac_data.mc_wr_weight;
3214 cac_tables->MCRdWeight = ni_pi->cac_data.mc_rd_weight;
3215 cac_tables->numWin_TDP = ni_pi->cac_data.num_win_tdp;
3216 cac_tables->l2numWin_TDP = ni_pi->cac_data.l2num_win_tdp;
3217 cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;
3219 ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
3220 sizeof(PP_NIslands_CACTABLES), pi->sram_end);
/* Any failure above: degrade gracefully by disabling CAC features. */
3224 ni_pi->enable_cac = false;
3225 ni_pi->enable_power_containment = false;
3233 static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
3235 struct ni_power_info *ni_pi = ni_get_pi(rdev);
3238 if (!ni_pi->enable_cac ||
3239 !ni_pi->cac_configuration_required)
3242 if (ni_pi->cac_weights == NULL)
3245 reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
3246 WEIGHT_TCP_SIG1_MASK |
3247 WEIGHT_TA_SIG_MASK);
3248 reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
3249 WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
3250 WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
3251 WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);
3253 reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
3254 WEIGHT_TCC_EN1_MASK |
3255 WEIGHT_TCC_EN2_MASK);
3256 reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
3257 WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
3258 WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
3259 WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);
3261 reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
3262 WEIGHT_CB_EN1_MASK |
3263 WEIGHT_CB_EN2_MASK |
3264 WEIGHT_CB_EN3_MASK);
3265 reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
3266 WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
3267 WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
3268 WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
3269 WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);
3271 reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
3272 WEIGHT_DB_SIG1_MASK |
3273 WEIGHT_DB_SIG2_MASK |
3274 WEIGHT_DB_SIG3_MASK);
3275 reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
3276 WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
3277 WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
3278 WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
3279 WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);
3281 reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
3282 WEIGHT_SXM_SIG1_MASK |
3283 WEIGHT_SXM_SIG2_MASK |
3284 WEIGHT_SXS_SIG0_MASK |
3285 WEIGHT_SXS_SIG1_MASK);
3286 reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
3287 WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
3288 WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
3289 WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
3290 WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
3291 WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);
3293 reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
3296 WEIGHT_SPI_SIG0_MASK);
3297 reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
3298 WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
3299 WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
3300 WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
3301 WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);
3303 reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
3304 WEIGHT_SPI_SIG2_MASK |
3305 WEIGHT_SPI_SIG3_MASK |
3306 WEIGHT_SPI_SIG4_MASK |
3307 WEIGHT_SPI_SIG5_MASK);
3308 reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
3309 WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
3310 WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
3311 WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
3312 WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
3313 WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);
3315 reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
3316 WEIGHT_LDS_SIG1_MASK |
3318 reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
3319 WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
3320 WEIGHT_SC(ni_pi->cac_weights->weight_sc));
3321 WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);
3323 reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
3325 WEIGHT_PA_SIG0_MASK |
3326 WEIGHT_PA_SIG1_MASK |
3327 WEIGHT_VGT_SIG0_MASK);
3328 reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
3329 WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
3330 WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
3331 WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
3332 WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
3333 WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);
3335 reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
3336 WEIGHT_VGT_SIG2_MASK |
3337 WEIGHT_DC_SIG0_MASK |
3338 WEIGHT_DC_SIG1_MASK |
3339 WEIGHT_DC_SIG2_MASK);
3340 reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
3341 WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
3342 WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
3343 WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
3344 WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
3345 WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);
3347 reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
3348 WEIGHT_UVD_SIG0_MASK |
3349 WEIGHT_UVD_SIG1_MASK |
3350 WEIGHT_SPARE0_MASK |
3351 WEIGHT_SPARE1_MASK);
3352 reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
3353 WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
3354 WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
3355 WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
3356 WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
3357 WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);
3359 reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
3360 WEIGHT_SQ_VSP0_MASK);
3361 reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
3362 WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
3363 WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);
3365 reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
3366 reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
3367 WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);
3369 reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
3370 OVR_VAL_SPARE_0_MASK |
3371 OVR_MODE_SPARE_1_MASK |
3372 OVR_VAL_SPARE_1_MASK);
3373 reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
3374 OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
3375 OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
3376 OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
3377 WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);
3379 reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
3382 reg |= (VSP(ni_pi->cac_weights->vsp) |
3383 VSP0(ni_pi->cac_weights->vsp0) |
3384 GPR(ni_pi->cac_weights->gpr));
3385 WREG32(SQ_CAC_THRESHOLD, reg);
3387 reg = (MCDW_WR_ENABLE |
3392 WREG32(MC_CG_CONFIG, reg);
3394 reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
3395 WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
3397 WREG32(MC_CG_DATAPORT, reg);
/*
 * ni_enable_smc_cac - toggle SMC-side CAC (capacitance/activity power
 * estimation) via SMC messages around a power-state switch.
 * NOTE(review): this listing is elided — the 'enable' parameter, braces,
 * several error checks and the return statement are not visible here.
 */
3402 static int ni_enable_smc_cac(struct radeon_device *rdev,
3403 struct radeon_ps *radeon_new_state,
3406 struct ni_power_info *ni_pi = ni_get_pi(rdev);
3408 PPSMC_Result smc_result;
3410 if (ni_pi->enable_cac) {
/* CAC is not collected while in a UVD (video decode) state */
3412 if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
3413 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);
3415 if (ni_pi->support_cac_long_term_average) {
3416 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
/* long-term averaging is best-effort: drop the feature if the SMC refuses */
3417 if (PPSMC_Result_OK != smc_result)
3418 ni_pi->support_cac_long_term_average = false;
3421 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
3422 if (PPSMC_Result_OK != smc_result)
3425 ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
/* disable path: only needed if CAC was previously enabled */
3427 } else if (ni_pi->cac_enabled) {
3428 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
3430 ni_pi->cac_enabled = false;
3432 if (ni_pi->support_cac_long_term_average) {
3433 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
3434 if (PPSMC_Result_OK != smc_result)
3435 ni_pi->support_cac_long_term_average = false;
/*
 * ni_pcie_performance_request - forward a PCIe link-speed request to the
 * platform firmware via ACPI (compiled in only with CONFIG_ACPI).
 * Registers the device with the platform before the first PECI gen1/gen2
 * request and de-registers on REMOVE_REGISTRY.
 */
3443 static int ni_pcie_performance_request(struct radeon_device *rdev,
3444 u8 perf_req, bool advertise)
3446 #if defined(CONFIG_ACPI)
3447 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3449 if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
3450 (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
/* one-time platform notification before the first real request */
3451 if (eg_pi->pcie_performance_request_registered == false)
3452 radeon_acpi_pcie_notify_device_ready(rdev);
3453 eg_pi->pcie_performance_request_registered = true;
3454 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3455 } else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
3456 eg_pi->pcie_performance_request_registered) {
3457 eg_pi->pcie_performance_request_registered = false;
3458 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
/*
 * ni_advertise_gen2_capability - cache whether the link partner supports
 * PCIe gen2 (from PCIE_LC_SPEED_CNTL), then ask the platform for gen2.
 * NOTE(review): the 'else' between lines 3473 and 3475 is elided here.
 */
3464 static int ni_advertise_gen2_capability(struct radeon_device *rdev)
3466 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3469 tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
/* gen2 is usable only if the other end both supports and has advertised it */
3471 if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3472 (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
3473 pi->pcie_gen2 = true;
3475 pi->pcie_gen2 = false;
3478 ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
/*
 * ni_enable_bif_dynamic_pcie_gen2 - program the BIF/PCIE link-control
 * registers so hardware can switch between gen1 and gen2 dynamically
 * (enable path) or pin the link at gen1 (disable path).
 * NOTE(review): the 'enable' parameter line and the branch that selects
 * between the two paths are elided in this listing.
 */
3483 static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
3486 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3489 tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3491 if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3492 (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
/* only reprogram the BIF client request if we did not boot in gen2 */
3494 if (!pi->boot_in_gen2) {
3495 bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3496 bif |= CG_CLIENT_REQ(0xd);
3497 WREG32(CG_BIF_REQ_AND_RSP, bif);
3499 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3500 tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
3501 tmp |= LC_GEN2_EN_STRAP;
/* pulse the failed-speed-change counter clear bit */
3503 tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
3504 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3506 tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
3507 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
/* disable path: same BIF programming, but strip the gen2 strap */
3509 if (!pi->boot_in_gen2) {
3510 bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3511 bif |= CG_CLIENT_REQ(0xd);
3512 WREG32(CG_BIF_REQ_AND_RSP, bif);
3514 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3515 tmp &= ~LC_GEN2_EN_STRAP;
3517 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
/*
 * ni_enable_dynamic_pcie_gen2 - top-level gen2 toggle: program the BIF,
 * then set or clear ENABLE_GEN2PCIE in GENERAL_PWRMGT.
 * NOTE(review): the 'enable' parameter and the if/else selecting between
 * the two WREG32_P calls are elided in this listing.
 */
3522 static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
3525 ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
3528 WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
3530 WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
/*
 * ni_set_uvd_clock_before_set_eng_clock - switch UVD vclk/dclk ahead of the
 * engine-clock change, but only when the engine clock is about to go up
 * (new top-level sclk >= current top-level sclk).  No-op if the UVD clocks
 * are unchanged.  NOTE(review): the early 'return' lines are elided here.
 */
3533 void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
3534 struct radeon_ps *new_ps,
3535 struct radeon_ps *old_ps)
3537 struct ni_ps *new_state = ni_get_ps(new_ps);
3538 struct ni_ps *current_state = ni_get_ps(old_ps);
3540 if ((new_ps->vclk == old_ps->vclk) &&
3541 (new_ps->dclk == old_ps->dclk))
/* compare the highest performance level of each state */
3544 if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
3545 current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3548 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
/*
 * ni_set_uvd_clock_after_set_eng_clock - counterpart of the 'before' hook:
 * switch UVD vclk/dclk after the engine-clock change, but only when the
 * engine clock just went down (new top-level sclk < current top-level sclk).
 * NOTE(review): the early 'return' lines are elided in this listing.
 */
3551 void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
3552 struct radeon_ps *new_ps,
3553 struct radeon_ps *old_ps)
3555 struct ni_ps *new_state = ni_get_ps(new_ps);
3556 struct ni_ps *current_state = ni_get_ps(old_ps);
3558 if ((new_ps->vclk == old_ps->vclk) &&
3559 (new_ps->dclk == old_ps->dclk))
3562 if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
3563 current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3566 radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
/*
 * ni_dpm_setup_asic - one-time ASIC preparation for DPM: load MC firmware,
 * snapshot clock/arbiter registers, detect memory type and PCIe gen2 status,
 * and enable ACPI power management.
 */
3569 void ni_dpm_setup_asic(struct radeon_device *rdev)
3571 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3574 r = ni_mc_load_microcode(rdev);
3576 DRM_ERROR("Failed to load MC firmware!\n");
3577 ni_read_clock_registers(rdev);
3578 btc_read_arb_registers(rdev);
3579 rv770_get_memory_type(rdev);
/* only negotiate gen2 with the platform when ACPI support was detected */
3580 if (eg_pi->pcie_performance_request)
3581 ni_advertise_gen2_capability(rdev);
3582 rv770_get_pcie_gen2_status(rdev);
3583 rv770_enable_acpi_pm(rdev);
/*
 * ni_update_current_ps - copy @rps into the driver's "current state"
 * bookkeeping.  Both the generic radeon_ps and the NI-private ni_ps are
 * copied by value, then ps_priv is re-pointed at the private copy so the
 * pair stays self-consistent.
 */
3586 void ni_update_current_ps(struct radeon_device *rdev,
3587 struct radeon_ps *rps)
3589 struct ni_ps *new_ps = ni_get_ps(rps);
3590 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3591 struct ni_power_info *ni_pi = ni_get_pi(rdev);
3593 eg_pi->current_rps = *rps;
3594 ni_pi->current_ps = *new_ps;
3595 eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
/*
 * ni_update_requested_ps - same value-copy pattern as ni_update_current_ps,
 * but for the "requested state" slots used while a state switch is pending.
 */
3598 void ni_update_requested_ps(struct radeon_device *rdev,
3599 struct radeon_ps *rps)
3601 struct ni_ps *new_ps = ni_get_ps(rps);
3602 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3603 struct ni_power_info *ni_pi = ni_get_pi(rdev);
3605 eg_pi->requested_rps = *rps;
3606 ni_pi->requested_ps = *new_ps;
3607 eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
/*
 * ni_dpm_enable - full DPM bring-up sequence for Cayman: default clock
 * gating, voltage tables, SMC firmware upload and tables, CAC, then start
 * the SMC and DPM proper, finishing with the boot state as "current".
 * NOTE(review): this listing is elided — the if(ret) checks, early returns
 * and the final 'return 0' between the visible lines are not shown.
 */
3610 int ni_dpm_enable(struct radeon_device *rdev)
3612 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3613 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3614 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
/* phase 1: put clock gating into a known default state */
3617 if (pi->gfx_clock_gating)
3618 ni_cg_clockgating_default(rdev);
3619 if (btc_dpm_enabled(rdev))
3621 if (pi->mg_clock_gating)
3622 ni_mg_clockgating_default(rdev);
3623 if (eg_pi->ls_clock_gating)
3624 ni_ls_clockgating_default(rdev);
/* phase 2: voltage control and AC timing tables */
3625 if (pi->voltage_control) {
3626 rv770_enable_voltage_control(rdev, true);
3627 ret = cypress_construct_voltage_tables(rdev);
3629 DRM_ERROR("cypress_construct_voltage_tables failed\n");
3633 if (eg_pi->dynamic_ac_timing) {
3634 ret = ni_initialize_mc_reg_table(rdev);
/* dynamic AC timing is optional: fall back silently on failure */
3636 eg_pi->dynamic_ac_timing = false;
3639 cypress_enable_spread_spectrum(rdev, true);
3640 if (pi->thermal_protection)
3641 rv770_enable_thermal_protection(rdev, true);
3642 rv770_setup_bsp(rdev);
3643 rv770_program_git(rdev);
3644 rv770_program_tp(rdev);
3645 rv770_program_tpp(rdev);
3646 rv770_program_sstp(rdev);
3647 cypress_enable_display_gap(rdev);
3648 rv770_program_vc(rdev);
3649 if (pi->dynamic_pcie_gen2)
3650 ni_enable_dynamic_pcie_gen2(rdev, true);
/* phase 3: SMC firmware and its tables */
3651 ret = rv770_upload_firmware(rdev);
3653 DRM_ERROR("rv770_upload_firmware failed\n");
3656 ret = ni_process_firmware_header(rdev);
3658 DRM_ERROR("ni_process_firmware_header failed\n");
3661 ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
3663 DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
3666 ret = ni_init_smc_table(rdev);
3668 DRM_ERROR("ni_init_smc_table failed\n");
3671 ret = ni_init_smc_spll_table(rdev);
3673 DRM_ERROR("ni_init_smc_spll_table failed\n");
3676 ret = ni_init_arb_table_index(rdev);
3678 DRM_ERROR("ni_init_arb_table_index failed\n");
3681 if (eg_pi->dynamic_ac_timing) {
3682 ret = ni_populate_mc_reg_table(rdev, boot_ps);
3684 DRM_ERROR("ni_populate_mc_reg_table failed\n");
/* phase 4: CAC / TDP power management */
3688 ret = ni_initialize_smc_cac_tables(rdev);
3690 DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
3693 ret = ni_initialize_hardware_cac_manager(rdev);
3695 DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
3698 ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
3700 DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
/* phase 5: start the SMC and enable DPM + clock gating */
3703 ni_program_response_times(rdev);
3704 r7xx_start_smc(rdev);
3705 ret = cypress_notify_smc_display_change(rdev, false);
3707 DRM_ERROR("cypress_notify_smc_display_change failed\n");
3710 cypress_enable_sclk_control(rdev, true);
3711 if (eg_pi->memory_transition)
3712 cypress_enable_mclk_control(rdev, true);
3713 cypress_start_dpm(rdev);
3714 if (pi->gfx_clock_gating)
3715 ni_gfx_clockgating_enable(rdev, true);
3716 if (pi->mg_clock_gating)
3717 ni_mg_clockgating_enable(rdev, true);
3718 if (eg_pi->ls_clock_gating)
3719 ni_ls_clockgating_enable(rdev, true);
3721 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
3723 ni_update_current_ps(rdev, boot_ps);
/*
 * ni_dpm_disable - tear DPM down in (roughly) reverse order of
 * ni_dpm_enable: stop power containment/CAC, disable thermal interrupt,
 * undo clock gating, reset to defaults and force the arbiter back to F0,
 * ending with the boot state as "current".
 */
3729 void ni_dpm_disable(struct radeon_device *rdev)
3730 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3731 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3732 struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
/* nothing to do if DPM never came up */
3734 if (!btc_dpm_enabled(rdev))
3736 rv770_clear_vc(rdev);
3737 if (pi->thermal_protection)
3738 rv770_enable_thermal_protection(rdev, false);
3739 ni_enable_power_containment(rdev, boot_ps, false);
3740 ni_enable_smc_cac(rdev, boot_ps, false);
3741 cypress_enable_spread_spectrum(rdev, false);
3742 rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
3743 if (pi->dynamic_pcie_gen2)
3744 ni_enable_dynamic_pcie_gen2(rdev, false);
/* stop the thermal interrupt if we own an internal sensor */
3746 if (rdev->irq.installed &&
3747 r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
3748 rdev->irq.dpm_thermal = false;
3749 radeon_irq_set(rdev);
3752 if (pi->gfx_clock_gating)
3753 ni_gfx_clockgating_enable(rdev, false);
3754 if (pi->mg_clock_gating)
3755 ni_mg_clockgating_enable(rdev, false);
3756 if (eg_pi->ls_clock_gating)
3757 ni_ls_clockgating_enable(rdev, false);
3759 btc_reset_to_default(rdev);
3761 ni_force_switch_to_arb_f0(rdev);
3763 ni_update_current_ps(rdev, boot_ps);
/*
 * ni_power_control_set_level - re-apply TDP limits for the requested state:
 * restrict performance levels, halt the SMC while the limits are rewritten,
 * then resume and re-commit the software state.
 * NOTE(review): the if(ret) return checks between steps are elided here.
 */
3766 static int ni_power_control_set_level(struct radeon_device *rdev)
3768 struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
3771 ret = ni_restrict_performance_levels_before_switch(rdev);
3774 ret = rv770_halt_smc(rdev);
3777 ret = ni_populate_smc_tdp_limits(rdev, new_ps);
3780 ret = rv770_resume_smc(rdev);
3783 ret = rv770_set_sw_state(rdev);
/*
 * ni_dpm_pre_set_power_state - stage the requested power state: snapshot it
 * into the driver's "requested" slots, then apply NI-specific adjustment
 * rules (clock/voltage clamping etc.) to the staged copy.
 */
3790 int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
3792 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
/* work on a local copy so the global requested_ps is not mutated directly */
3793 struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
3794 struct radeon_ps *new_ps = &requested_ps;
3796 ni_update_requested_ps(rdev, new_ps);
3798 ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
/*
 * ni_dpm_set_power_state - commit the staged state to hardware.  CAC and
 * power containment are disabled across the switch, the SMC is halted while
 * the new state and MC registers are uploaded, then everything is resumed
 * and re-enabled, and TDP limits are reprogrammed for the new state.
 * NOTE(review): this listing is elided — the if(ret) checks and returns
 * after each step are not shown.
 */
3803 int ni_dpm_set_power_state(struct radeon_device *rdev)
3805 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3806 struct radeon_ps *new_ps = &eg_pi->requested_rps;
3807 struct radeon_ps *old_ps = &eg_pi->current_rps;
3810 ret = ni_restrict_performance_levels_before_switch(rdev);
3812 DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
/* UVD clocks switch before the engine clock when sclk is going up */
3815 ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
3816 ret = ni_enable_power_containment(rdev, new_ps, false);
3818 DRM_ERROR("ni_enable_power_containment failed\n");
3821 ret = ni_enable_smc_cac(rdev, new_ps, false);
3823 DRM_ERROR("ni_enable_smc_cac failed\n");
3826 ret = rv770_halt_smc(rdev);
3828 DRM_ERROR("rv770_halt_smc failed\n");
3831 if (eg_pi->smu_uvd_hs)
3832 btc_notify_uvd_to_smc(rdev, new_ps);
3833 ret = ni_upload_sw_state(rdev, new_ps);
3835 DRM_ERROR("ni_upload_sw_state failed\n");
3838 if (eg_pi->dynamic_ac_timing) {
3839 ret = ni_upload_mc_reg_table(rdev, new_ps);
3841 DRM_ERROR("ni_upload_mc_reg_table failed\n");
3845 ret = ni_program_memory_timing_parameters(rdev, new_ps);
3847 DRM_ERROR("ni_program_memory_timing_parameters failed\n");
3850 ret = rv770_resume_smc(rdev);
3852 DRM_ERROR("rv770_resume_smc failed\n");
3855 ret = rv770_set_sw_state(rdev);
3857 DRM_ERROR("rv770_set_sw_state failed\n");
/* ...and after the engine clock when sclk went down */
3860 ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
3861 ret = ni_enable_smc_cac(rdev, new_ps, true);
3863 DRM_ERROR("ni_enable_smc_cac failed\n");
3866 ret = ni_enable_power_containment(rdev, new_ps, true);
3868 DRM_ERROR("ni_enable_power_containment failed\n");
3873 ret = ni_power_control_set_level(rdev);
3875 DRM_ERROR("ni_power_control_set_level failed\n");
/*
 * ni_dpm_post_set_power_state - after a successful switch, promote the
 * requested state to be the current state.
 */
3882 void ni_dpm_post_set_power_state(struct radeon_device *rdev)
3884 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3885 struct radeon_ps *new_ps = &eg_pi->requested_rps;
3887 ni_update_current_ps(rdev, new_ps);
/*
 * ni_dpm_reset_asic - drop to restricted performance levels and return the
 * ASIC to its boot power state.
 */
3890 void ni_dpm_reset_asic(struct radeon_device *rdev)
3892 ni_restrict_performance_levels_before_switch(rdev);
3893 rv770_set_boot_state(rdev);
/*
 * Overlay unions for the several on-VBIOS ATOM PowerPlay table revisions:
 * the parser reads the table through whichever member matches the header
 * revision.  NOTE(review): the 'union power_info {' opener (line 3896) and
 * the closing '};' lines are elided in this listing.
 */
3897 struct _ATOM_POWERPLAY_INFO info;
3898 struct _ATOM_POWERPLAY_INFO_V2 info_2;
3899 struct _ATOM_POWERPLAY_INFO_V3 info_3;
3900 struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
3901 struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
3902 struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
/* per-performance-level clock info, one layout per ASIC family */
3905 union pplib_clock_info {
3906 struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
3907 struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
3908 struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
3909 struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
/* power-state entry, v1 and v2 table layouts */
3912 union pplib_power_state {
3913 struct _ATOM_PPLIB_STATE v1;
3914 struct _ATOM_PPLIB_STATE_V2 v2;
/*
 * ni_parse_pplib_non_clock_info - fill the generic radeon_ps fields
 * (caps/class flags, UVD vclk/dclk) from a PPLib non-clock entry, and
 * remember the boot and UVD states on the device.
 */
3917 static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
3918 struct radeon_ps *rps,
3919 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
3922 rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
3923 rps->class = le16_to_cpu(non_clock_info->usClassification);
3924 rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
/* newer table revisions carry explicit UVD clocks; older ones get defaults
 * only for UVD states
 */
3926 if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
3927 rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
3928 rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
3929 } else if (r600_is_uvd_state(rps->class, rps->class2)) {
3930 rps->vclk = RV770_DEFAULT_VCLK_FREQ;
3931 rps->dclk = RV770_DEFAULT_DCLK_FREQ;
3937 if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
3938 rdev->pm.dpm.boot_ps = rps;
3939 if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
3940 rdev->pm.dpm.uvd_ps = rps;
/*
 * ni_parse_pplib_clock_info - decode one Evergreen-layout clock-info entry
 * into performance level @index of the state, patching up leakage vddc,
 * ACPI/ULV bookkeeping, min/max vddc tracking, and the boot state's
 * default clocks/voltages.
 */
3943 static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
3944 struct radeon_ps *rps, int index,
3945 union pplib_clock_info *clock_info)
3947 struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3948 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3949 struct ni_ps *ps = ni_get_ps(rps);
3950 struct rv7xx_pl *pl = &ps->performance_levels[index];
3952 ps->performance_level_count = index + 1;
/* 24-bit clocks are split low-16 / high-8 in the table */
3954 pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
3955 pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
3956 pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
3957 pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
3959 pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
3960 pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
3961 pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
3963 /* patch up vddc if necessary */
/* 0xff01 is the "use max leakage voltage" sentinel in the table */
3964 if (pl->vddc == 0xff01) {
3966 pl->vddc = pi->max_vddc;
3969 if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
3970 pi->acpi_vddc = pl->vddc;
3971 eg_pi->acpi_vddci = pl->vddci;
3972 if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
3973 pi->acpi_pcie_gen2 = true;
3975 pi->acpi_pcie_gen2 = false;
3978 if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
3979 eg_pi->ulv.supported = true;
/* track the table-wide vddc range for later clamping */
3983 if (pi->min_vddc_in_table > pl->vddc)
3984 pi->min_vddc_in_table = pl->vddc;
3986 if (pi->max_vddc_in_table < pl->vddc)
3987 pi->max_vddc_in_table = pl->vddc;
3989 /* patch up boot state */
3990 if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
3991 u16 vddc, vddci, mvdd;
3992 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
3993 pl->mclk = rdev->clock.default_mclk;
3994 pl->sclk = rdev->clock.default_sclk;
/* the performance UI state defines the max-on-AC operating point */
3999 if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
4000 ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
4001 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
4002 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
4003 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
4004 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
4008 static int ni_parse_power_table(struct radeon_device *rdev)
4010 struct radeon_mode_info *mode_info = &rdev->mode_info;
4011 struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
4012 union pplib_power_state *power_state;
4014 union pplib_clock_info *clock_info;
4015 union power_info *power_info;
4016 int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
4021 if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
4022 &frev, &crev, &data_offset))
4024 power_info = (union power_info *)((uint8_t*)mode_info->atom_context->bios + data_offset);
4026 rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
4027 power_info->pplib.ucNumStates, GFP_KERNEL);
4028 if (!rdev->pm.dpm.ps)
4031 for (i = 0; i < power_info->pplib.ucNumStates; i++) {
4032 power_state = (union pplib_power_state *)
4033 ((uint8_t*)mode_info->atom_context->bios + data_offset +
4034 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
4035 i * power_info->pplib.ucStateEntrySize);
4036 non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
4037 ((uint8_t*)mode_info->atom_context->bios + data_offset +
4038 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
4039 (power_state->v1.ucNonClockStateIndex *
4040 power_info->pplib.ucNonClockSize));
4041 if (power_info->pplib.ucStateEntrySize - 1) {
4043 ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
4045 kfree(rdev->pm.dpm.ps);
4048 rdev->pm.dpm.ps[i].ps_priv = ps;
4049 ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
4051 power_info->pplib.ucNonClockSize);
4052 idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
4053 for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
4054 clock_info = (union pplib_clock_info *)
4055 ((uint8_t*)mode_info->atom_context->bios + data_offset +
4056 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
4057 (idx[j] * power_info->pplib.ucClockInfoSize));
4058 ni_parse_pplib_clock_info(rdev,
4059 &rdev->pm.dpm.ps[i], j,
4064 rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
/*
 * ni_dpm_init - allocate and populate the NI power-info structure, parse
 * the VBIOS power tables, and fill in every DPM tuning default for Cayman.
 * NOTE(review): this listing is elided — allocation checks, several
 * if(ret) returns, the pi/eg_pi initialization from ni_pi, and the device
 * ID case labels of the switch are not visible here.
 */
4068 int ni_dpm_init(struct radeon_device *rdev)
4070 struct rv7xx_power_info *pi;
4071 struct evergreen_power_info *eg_pi;
4072 struct ni_power_info *ni_pi;
4073 struct atom_clock_dividers dividers;
4076 ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
4079 rdev->pm.dpm.priv = ni_pi;
4083 rv770_get_max_vddc(rdev);
4085 eg_pi->ulv.supported = false;
4087 eg_pi->acpi_vddci = 0;
4088 pi->min_vddc_in_table = 0;
4089 pi->max_vddc_in_table = 0;
/* pull platform caps and both power tables before any tuning below */
4091 ret = r600_get_platform_caps(rdev);
4095 ret = ni_parse_power_table(rdev);
4098 ret = r600_parse_extended_power_table(rdev);
/* synthesize a 4-entry vddc-vs-dispclk table (clk in 10 kHz, v in mV) */
4102 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
4103 kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL);
4104 if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
4105 r600_free_extended_power_table(rdev);
4108 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
4109 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
4110 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
4111 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
4112 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
4113 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
4114 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
4115 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
4116 rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
4118 ni_patch_dependency_tables_based_on_leakage(rdev);
4120 if (rdev->pm.dpm.voltage_response_time == 0)
4121 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
4122 if (rdev->pm.dpm.backbias_response_time == 0)
4123 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
4125 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
4126 0, false, &dividers);
4128 pi->ref_div = dividers.ref_div + 1;
4130 pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
/* arbiter defaults (shared with rv770), plus UVD-specific overrides */
4132 pi->rlp = RV770_RLP_DFLT;
4133 pi->rmp = RV770_RMP_DFLT;
4134 pi->lhp = RV770_LHP_DFLT;
4135 pi->lmp = RV770_LMP_DFLT;
4137 eg_pi->ats[0].rlp = RV770_RLP_DFLT;
4138 eg_pi->ats[0].rmp = RV770_RMP_DFLT;
4139 eg_pi->ats[0].lhp = RV770_LHP_DFLT;
4140 eg_pi->ats[0].lmp = RV770_LMP_DFLT;
4142 eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
4143 eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
4144 eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
4145 eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;
4147 eg_pi->smu_uvd_hs = true;
/* mclk thresholds differ for one specific device id */
4149 if (rdev->pdev->device == 0x6707) {
4150 pi->mclk_strobe_mode_threshold = 55000;
4151 pi->mclk_edc_enable_threshold = 55000;
4152 eg_pi->mclk_edc_wr_enable_threshold = 55000;
4154 pi->mclk_strobe_mode_threshold = 40000;
4155 pi->mclk_edc_enable_threshold = 40000;
4156 eg_pi->mclk_edc_wr_enable_threshold = 40000;
4158 ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;
/* probe which voltage rails are GPIO-controllable */
4160 pi->voltage_control =
4161 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
4164 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
4166 eg_pi->vddci_control =
4167 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
4169 rv770_get_engine_memory_ss(rdev);
4171 pi->asi = RV770_ASI_DFLT;
4172 pi->pasi = CYPRESS_HASI_DFLT;
4173 pi->vrc = CYPRESS_VRC_DFLT;
/* feature enables/disables for this ASIC generation */
4175 pi->power_gating = false;
4177 pi->gfx_clock_gating = true;
4179 pi->mg_clock_gating = true;
4180 pi->mgcgtssm = true;
4181 eg_pi->ls_clock_gating = false;
4182 eg_pi->sclk_deep_sleep = false;
4184 pi->dynamic_pcie_gen2 = true;
4186 if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
4187 pi->thermal_protection = true;
4189 pi->thermal_protection = false;
4191 pi->display_gap = true;
4197 eg_pi->dynamic_ac_timing = true;
4200 eg_pi->light_sleep = true;
4201 eg_pi->memory_transition = true;
4202 #if defined(CONFIG_ACPI)
4203 eg_pi->pcie_performance_request =
4204 radeon_acpi_is_pcie_performance_request_supported(rdev);
4206 eg_pi->pcie_performance_request = false;
4209 eg_pi->dll_default_on = false;
4211 eg_pi->sclk_deep_sleep = false;
4213 pi->mclk_stutter_mode_threshold = 0;
4215 pi->sram_end = SMC_RAM_END;
4217 rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
4218 rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
4219 rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
4220 rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
4221 rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
4222 rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
4223 rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
4224 rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;
4226 ni_pi->cac_data.leakage_coefficients.at = 516;
4227 ni_pi->cac_data.leakage_coefficients.bt = 18;
4228 ni_pi->cac_data.leakage_coefficients.av = 51;
4229 ni_pi->cac_data.leakage_coefficients.bv = 2957;
/* pick the CAC weight table for the board SKU (XT / PRO / LE) */
4231 switch (rdev->pdev->device) {
4237 ni_pi->cac_weights = &cac_weights_cayman_xt;
4244 ni_pi->cac_weights = &cac_weights_cayman_pro;
4251 ni_pi->cac_weights = &cac_weights_cayman_le;
4255 if (ni_pi->cac_weights->enable_power_containment_by_default) {
4256 ni_pi->enable_power_containment = true;
4257 ni_pi->enable_cac = true;
4258 ni_pi->enable_sq_ramping = true;
4260 ni_pi->enable_power_containment = false;
4261 ni_pi->enable_cac = false;
4262 ni_pi->enable_sq_ramping = false;
4265 ni_pi->driver_calculate_cac_leakage = false;
4266 ni_pi->cac_configuration_required = true;
4268 if (ni_pi->cac_configuration_required) {
4269 ni_pi->support_cac_long_term_average = true;
4270 ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
4271 ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
4273 ni_pi->support_cac_long_term_average = false;
4274 ni_pi->lta_window_size = 0;
4275 ni_pi->lts_truncate = 0;
4278 ni_pi->use_power_boost_limit = true;
4280 /* make sure dc limits are valid */
4281 if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) ||
4282 (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0))
4283 rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc =
4284 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
/*
 * ni_dpm_fini - release everything ni_dpm_init allocated: the per-state
 * ps_priv objects, the state array, the private info struct, the synthetic
 * dispclk dependency table, and the extended power tables.
 */
4289 void ni_dpm_fini(struct radeon_device *rdev)
4293 for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
4294 kfree(rdev->pm.dpm.ps[i].ps_priv);
4296 kfree(rdev->pm.dpm.ps);
4297 kfree(rdev->pm.dpm.priv);
4298 kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
4299 r600_free_extended_power_table(rdev);
/*
 * ni_dpm_print_power_state - dump a power state (class, caps, UVD clocks
 * and every performance level) to the kernel log for debugging.
 */
4302 void ni_dpm_print_power_state(struct radeon_device *rdev,
4303 struct radeon_ps *rps)
4305 struct ni_ps *ps = ni_get_ps(rps);
4306 struct rv7xx_pl *pl;
4309 r600_dpm_print_class_info(rps->class, rps->class2);
4310 r600_dpm_print_cap_info(rps->caps);
4311 printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4312 for (i = 0; i < ps->performance_level_count; i++) {
4313 pl = &ps->performance_levels[i];
/* SI and newer also report the PCIe gen of the level */
4314 if (rdev->family >= CHIP_TAHITI)
4315 printk("\t\tpower level %d sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
4316 i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
4318 printk("\t\tpower level %d sclk: %u mclk: %u vddc: %u vddci: %u\n",
4319 i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4321 r600_dpm_print_ps_status(rdev, rps);
/*
 * ni_dpm_debugfs_print_current_performance_level - debugfs hook: read the
 * hardware's current profile index and print that level of the current
 * state into the seq_file.
 */
4324 void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
4327 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4328 struct radeon_ps *rps = &eg_pi->current_rps;
4329 struct ni_ps *ps = ni_get_ps(rps);
4330 struct rv7xx_pl *pl;
4332 (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4333 CURRENT_STATE_INDEX_SHIFT;
/* guard against a hardware index beyond the parsed level count */
4335 if (current_index >= ps->performance_level_count) {
4336 seq_printf(m, "invalid dpm profile %d\n", current_index);
4338 pl = &ps->performance_levels[current_index];
4339 seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4340 seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u vddci: %u\n",
4341 current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
/*
 * ni_dpm_get_sclk - report the requested state's engine clock: lowest
 * level when @low, otherwise the highest level.
 * NOTE(review): the 'if (low)' selecting between the two returns is
 * elided in this listing.
 */
4345 u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
4347 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4348 struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4351 return requested_state->performance_levels[0].sclk;
4353 return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
/*
 * ni_dpm_get_mclk - memory-clock twin of ni_dpm_get_sclk: lowest level's
 * mclk when @low, otherwise the highest level's.
 * NOTE(review): the 'if (low)' line is elided in this listing.
 */
4356 u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
4358 struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4359 struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4362 return requested_state->performance_levels[0].mclk;
4364 return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;