Lines Matching defs:smu8_smu

332 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
333 struct TOC *toc = (struct TOC *)smu8_smu->toc_buffer.kaddr;
334 struct SMU_Task *task = &toc->tasks[smu8_smu->toc_entry_used_count++];
338 task->next = is_last ? END_OF_TASK_LIST : smu8_smu->toc_entry_used_count;
340 for (i = 0; i < smu8_smu->scratch_buffer_length; i++)
341 if (smu8_smu->scratch_buffer[i].firmware_ID == fw_enum)
344 if (i >= smu8_smu->scratch_buffer_length) {
349 task->addr.low = lower_32_bits(smu8_smu->scratch_buffer[i].mc_addr);
350 task->addr.high = upper_32_bits(smu8_smu->scratch_buffer[i].mc_addr);
351 task->size_bytes = smu8_smu->scratch_buffer[i].data_size;
355 (struct smu8_ih_meta_data *)smu8_smu->scratch_buffer[i].kaddr;
369 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
370 struct TOC *toc = (struct TOC *)smu8_smu->toc_buffer.kaddr;
371 struct SMU_Task *task = &toc->tasks[smu8_smu->toc_entry_used_count++];
375 task->next = is_last ? END_OF_TASK_LIST : smu8_smu->toc_entry_used_count;
377 for (i = 0; i < smu8_smu->driver_buffer_length; i++)
378 if (smu8_smu->driver_buffer[i].firmware_ID == fw_enum)
381 if (i >= smu8_smu->driver_buffer_length) {
386 task->addr.low = lower_32_bits(smu8_smu->driver_buffer[i].mc_addr);
387 task->addr.high = upper_32_bits(smu8_smu->driver_buffer[i].mc_addr);
388 task->size_bytes = smu8_smu->driver_buffer[i].data_size;
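
The hits at 332-355 and 369-388 come from the paired helpers that append one SMU_Task to the table of contents: each claims the next slot by post-incrementing toc_entry_used_count, chains task->next to the following slot (or END_OF_TASK_LIST), then linearly scans the scratch or driver buffer table for the matching firmware_ID before filling in the split 64-bit MC address and size. A minimal standalone sketch of that pattern follows; struct task, struct fw_entry, and populate_task are simplified stand-ins, not the kernel's definitions.

#include <stdint.h>

#define END_OF_TASK_LIST 0xFF
#define MAX_TASKS 16

/* Simplified stand-ins for the driver's TOC structures. */
struct task {
    uint8_t  next;        /* index of the next task, or END_OF_TASK_LIST */
    uint32_t addr_low;
    uint32_t addr_high;
    uint32_t size_bytes;
};

struct fw_entry {
    int      firmware_id;
    uint64_t mc_addr;     /* GPU (MC) address of the image */
    uint32_t data_size;
};

static struct task tasks[MAX_TASKS];
static int used_count;

static int populate_task(const struct fw_entry *tbl, int len,
                         int fw_id, int is_last)
{
    struct task *t = &tasks[used_count++];
    int i;

    /* Chain to the slot the next append will occupy. */
    t->next = is_last ? END_OF_TASK_LIST : (uint8_t)used_count;

    /* Linear scan by firmware ID, as both helpers do. */
    for (i = 0; i < len; i++)
        if (tbl[i].firmware_id == fw_id)
            break;
    if (i >= len)
        return -1;    /* unknown firmware ID */

    t->addr_low   = (uint32_t)tbl[i].mc_addr;
    t->addr_high  = (uint32_t)(tbl[i].mc_addr >> 32);
    t->size_bytes = tbl[i].data_size;
    return 0;
}

int main(void)
{
    const struct fw_entry tbl[] = {
        { .firmware_id = 1, .mc_addr = 0x100000000ULL, .data_size = 256 },
    };

    return populate_task(tbl, 1, 1, 1);
}
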
395 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
397 smu8_smu->toc_entry_aram = smu8_smu->toc_entry_used_count;
408 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
409 struct TOC *toc = (struct TOC *)smu8_smu->toc_buffer.kaddr;
419 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
420 struct TOC *toc = (struct TOC *)smu8_smu->toc_buffer.kaddr;
422 toc->JobList[JOB_GFX_SAVE] = (uint8_t)smu8_smu->toc_entry_used_count;
437 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
438 struct TOC *toc = (struct TOC *)smu8_smu->toc_buffer.kaddr;
440 toc->JobList[JOB_GFX_RESTORE] = (uint8_t)smu8_smu->toc_entry_used_count;
479 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
481 smu8_smu->toc_entry_power_profiling_index = smu8_smu->toc_entry_used_count;
491 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
493 smu8_smu->toc_entry_initialize_index = smu8_smu->toc_entry_used_count;
519 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
521 smu8_smu->toc_entry_clock_table = smu8_smu->toc_entry_used_count;
532 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
534 smu8_smu->toc_entry_used_count = 0;
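
The construct_toc_* hits (395-534) share one bookkeeping scheme: before a job's tasks are appended, the current toc_entry_used_count is recorded, either into a named index field (toc_entry_aram, toc_entry_power_profiling_index, toc_entry_initialize_index, toc_entry_clock_table) that is later handed to the firmware, or into toc->JobList[] so the SMU can locate the job's first task; construct_toc itself starts by zeroing toc_entry_used_count (534). The self-contained sketch below compresses the two variants into one illustrative function; the constants are placeholders, not hardware values.

#include <stdint.h>

#define IGNORE_JOB 0xFF
#define NUM_JOBLIST_ENTRIES 32
#define JOB_GFX_SAVE 3          /* illustrative slot, not the hardware value */

static uint8_t job_list[NUM_JOBLIST_ENTRIES];
static int used_count;          /* running SMU_Task count */
static int toc_entry_gfx_save;  /* index handed to the firmware later */

/* Mirrors the empty-job-list initialization seen at 408-409. */
static void init_empty_job_list(void)
{
    int i;

    for (i = 0; i < NUM_JOBLIST_ENTRIES; i++)
        job_list[i] = IGNORE_JOB;
}

/* Record where this job's first task will land, then append tasks. */
static void construct_toc_for_gfx_save(void)
{
    job_list[JOB_GFX_SAVE] = (uint8_t)used_count;
    toc_entry_gfx_save = used_count;
    /* ...task-population calls for the save sequence go here... */
}

int main(void)
{
    init_empty_job_list();
    construct_toc_for_gfx_save();
    return job_list[JOB_GFX_SAVE] == 0 ? 0 : 1;
}
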
548 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
555 smu8_smu->driver_buffer_length = 0;
568 smu8_smu->driver_buffer[i].mc_addr = info.mc_addr;
570 smu8_smu->driver_buffer[i].data_size = info.image_size;
572 smu8_smu->driver_buffer[i].firmware_ID = firmware_list[i];
573 smu8_smu->driver_buffer_length++;
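
Lines 548-573 are from the firmware-entry loop: driver_buffer_length is reset, and for each firmware in the load list the driver queries the ucode image and records its MC address, size, and ID. A compilable sketch of that table-driven loop; get_firmware_info() is a placeholder stub for the driver's firmware-info query, not a real API.

#include <stdint.h>

struct fw_info  { uint64_t mc_addr; uint32_t image_size; };
struct fw_entry { int firmware_id; uint64_t mc_addr; uint32_t data_size; };

/* Placeholder stub; returns 0 on success. */
static int get_firmware_info(int fw_id, struct fw_info *info)
{
    (void)fw_id;
    info->mc_addr = 0x200000;    /* dummy values for the sketch */
    info->image_size = 4096;
    return 0;
}

static struct fw_entry driver_buffer[8];
static uint32_t driver_buffer_length;

/* count must not exceed the driver_buffer capacity (8 here). */
static int populate_firmware_entries(const int *fw_list, int count)
{
    struct fw_info info;
    int i;

    driver_buffer_length = 0;
    for (i = 0; i < count; i++) {
        if (get_firmware_info(fw_list[i], &info))
            return -1;
        driver_buffer[i].mc_addr     = info.mc_addr;
        driver_buffer[i].data_size   = info.image_size;
        driver_buffer[i].firmware_id = fw_list[i];
        driver_buffer_length++;
    }
    return 0;
}

int main(void)
{
    const int fw_list[] = { 10, 11, 12 };    /* illustrative IDs */

    return populate_firmware_entries(fw_list, 3);
}
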
586 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
590 entry->kaddr = (char *) smu8_smu->smu_buffer.kaddr +
591 smu8_smu->smu_buffer_used_bytes;
592 entry->mc_addr = smu8_smu->smu_buffer.mc_addr + smu8_smu->smu_buffer_used_bytes;
595 smu8_smu->smu_buffer_used_bytes += ulsize_aligned;
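
Lines 586-595 show a simple bump allocator: each scratch entry is carved out of the shared smu_buffer at offset smu_buffer_used_bytes, with the CPU view (kaddr) and GPU view (mc_addr) advancing in lockstep, and the offset then bumped by the aligned size. A runnable sketch of the same scheme; the 32-byte alignment is an assumption here.

#include <stdint.h>

#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((uint32_t)(a) - 1))

struct scratch_entry {
    void     *kaddr;     /* CPU view */
    uint64_t  mc_addr;   /* GPU view */
    uint32_t  data_size;
};

static char     smu_buffer[4096];
static uint64_t smu_buffer_mc_addr = 0x100000;    /* illustrative base */
static uint32_t smu_buffer_used_bytes;

/* Carve the next sub-allocation out of the shared buffer; the CPU and
 * GPU views advance by the same aligned offset. */
static void alloc_scratch(uint32_t size, struct scratch_entry *e)
{
    e->kaddr     = smu_buffer + smu_buffer_used_bytes;
    e->mc_addr   = smu_buffer_mc_addr + smu_buffer_used_bytes;
    e->data_size = size;
    smu_buffer_used_bytes += ALIGN_UP(size, 32);
}

int main(void)
{
    struct scratch_entry a, b;

    alloc_scratch(100, &a);    /* occupies offsets [0, 128) after alignment */
    alloc_scratch(40,  &b);    /* so b starts at offset 128 */
    return b.mc_addr - smu_buffer_mc_addr == 128 ? 0 : 1;
}
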
602 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
605 for (i = 0; i < smu8_smu->scratch_buffer_length; i++) {
606 if (smu8_smu->scratch_buffer[i].firmware_ID
611 *table = (struct SMU8_Fusion_ClkTable *)smu8_smu->scratch_buffer[i].kaddr;
615 upper_32_bits(smu8_smu->scratch_buffer[i].mc_addr),
620 lower_32_bits(smu8_smu->scratch_buffer[i].mc_addr),
624 smu8_smu->toc_entry_clock_table,
634 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
637 for (i = 0; i < smu8_smu->scratch_buffer_length; i++) {
638 if (smu8_smu->scratch_buffer[i].firmware_ID
645 upper_32_bits(smu8_smu->scratch_buffer[i].mc_addr),
650 lower_32_bits(smu8_smu->scratch_buffer[i].mc_addr),
654 smu8_smu->toc_entry_clock_table,
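
Lines 602-654, the clock-table download and upload paths, both hand the scratch buffer's 64-bit MC address to the SMU as two 32-bit message parameters via upper_32_bits()/lower_32_bits(), followed by a message carrying toc_entry_clock_table. A runnable demonstration of the split-and-reassemble arithmetic:

#include <assert.h>
#include <stdint.h>

/* Same semantics as the kernel's lower_32_bits()/upper_32_bits(). */
static inline uint32_t lower_32(uint64_t v) { return (uint32_t)v; }
static inline uint32_t upper_32(uint64_t v) { return (uint32_t)(v >> 32); }

int main(void)
{
    uint64_t mc_addr = 0x0000000F80001000ULL;

    /* The driver sends these as two 32-bit SMU message parameters... */
    uint32_t hi = upper_32(mc_addr);
    uint32_t lo = lower_32(mc_addr);

    /* ...and the firmware side can reassemble the full address. */
    assert((((uint64_t)hi << 32) | lo) == mc_addr);
    return 0;
}
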
664 struct smu8_smumgr *smu8_smu = hwmgr->smu_backend;
682 upper_32_bits(smu8_smu->toc_buffer.mc_addr),
687 lower_32_bits(smu8_smu->toc_buffer.mc_addr),
694 smu8_smu->toc_entry_aram,
697 smu8_smu->toc_entry_power_profiling_index,
702 smu8_smu->toc_entry_initialize_index,
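
Lines 664-702 come from the firmware-load request: the TOC buffer's MC address is programmed high then low, after which the recorded entry indices (toc_entry_aram, toc_entry_power_profiling_index, toc_entry_initialize_index) are submitted one by one. The sketch below mirrors only that sequencing; send_msg() and the message strings are placeholders, not the driver's PPSMC message API.

#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

/* Placeholder for the driver's "send message with parameter" call. */
static void send_msg(const char *msg, uint32_t arg)
{
    printf("%-16s 0x%08" PRIx32 "\n", msg, arg);
}

static void request_load_fw(uint64_t toc_mc_addr, uint32_t entry_aram,
                            uint32_t entry_power_profiling,
                            uint32_t entry_initialize)
{
    /* Point the SMU at the TOC, then run the recorded jobs. */
    send_msg("TocAddrHi", (uint32_t)(toc_mc_addr >> 32));
    send_msg("TocAddrLo", (uint32_t)toc_mc_addr);
    send_msg("ExecuteJob", entry_aram);
    send_msg("ExecuteJob", entry_power_profiling);
    send_msg("ExecuteJob", entry_initialize);
}

int main(void)
{
    request_load_fw(0xF80001000ULL, 0, 3, 7);    /* sample indices */
    return 0;
}
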
759 struct smu8_smumgr *smu8_smu;
761 smu8_smu = kzalloc(sizeof(struct smu8_smumgr), GFP_KERNEL);
762 if (smu8_smu == NULL)
765 hwmgr->smu_backend = smu8_smu;
767 smu8_smu->toc_buffer.data_size = 4096;
768 smu8_smu->smu_buffer.data_size =
776 smu8_smu->toc_buffer.data_size,
779 &smu8_smu->toc_buffer.handle,
780 &smu8_smu->toc_buffer.mc_addr,
781 &smu8_smu->toc_buffer.kaddr);
786 smu8_smu->smu_buffer.data_size,
789 &smu8_smu->smu_buffer.handle,
790 &smu8_smu->smu_buffer.mc_addr,
791 &smu8_smu->smu_buffer.kaddr);
798 &smu8_smu->scratch_buffer[smu8_smu->scratch_buffer_length++])) {
806 &smu8_smu->scratch_buffer[smu8_smu->scratch_buffer_length++])) {
813 &smu8_smu->scratch_buffer[smu8_smu->scratch_buffer_length++])) {
821 &smu8_smu->scratch_buffer[smu8_smu->scratch_buffer_length++])) {
829 &smu8_smu->scratch_buffer[smu8_smu->scratch_buffer_length++])) {
837 amdgpu_bo_free_kernel(&smu8_smu->smu_buffer.handle,
838 &smu8_smu->smu_buffer.mc_addr,
839 &smu8_smu->smu_buffer.kaddr);
841 amdgpu_bo_free_kernel(&smu8_smu->toc_buffer.handle,
842 &smu8_smu->toc_buffer.mc_addr,
843 &smu8_smu->toc_buffer.kaddr);
845 kfree(smu8_smu);
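
Lines 759-845 cover backend init and its failure handling: allocate the backend, create the TOC and SMU kernel buffers, populate the scratch entries, and on any failure unwind exactly what was acquired, in reverse, through a goto ladder (the frees at 837-845 fall through from smu_buffer to toc_buffer to the kfree). A runnable sketch of that unwinding idiom, using plain malloc/free in place of amdgpu_bo_create_kernel()/amdgpu_bo_free_kernel():

#include <stdlib.h>

/* Placeholder for the scratch-entry population steps (five calls in
 * the listing above); returns 0 on success. */
static int populate_scratch(void *smu_buf)
{
    (void)smu_buf;
    return 0;
}

static int smumgr_init_sketch(void)
{
    void *backend, *toc_buf, *smu_buf;

    backend = calloc(1, 64);    /* kzalloc() stand-in */
    if (!backend)
        return -1;

    toc_buf = malloc(4096);     /* amdgpu_bo_create_kernel() stand-in */
    if (!toc_buf)
        goto err0;

    smu_buf = malloc(4096);
    if (!smu_buf)
        goto err1;

    if (populate_scratch(smu_buf))
        goto err2;

    /* On success the allocations are kept; the fini path performs the
     * same frees unconditionally, as lines 858-864 do. */
    return 0;

err2:    /* each label releases what the steps above it acquired */
    free(smu_buf);
err1:
    free(toc_buf);
err0:
    free(backend);
    return -1;
}

int main(void)
{
    return smumgr_init_sketch();
}
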
851 struct smu8_smumgr *smu8_smu;
856 smu8_smu = hwmgr->smu_backend;
857 if (smu8_smu) {
858 amdgpu_bo_free_kernel(&smu8_smu->toc_buffer.handle,
859 &smu8_smu->toc_buffer.mc_addr,
860 &smu8_smu->toc_buffer.kaddr);
861 amdgpu_bo_free_kernel(&smu8_smu->smu_buffer.handle,
862 &smu8_smu->smu_buffer.mc_addr,
863 &smu8_smu->smu_buffer.kaddr);
864 kfree(smu8_smu);
896 .name = "smu8_smu",
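
The final hit (896) is the .name field of the backend's function table: the smu8 routines above are exported to the powerplay core as one ops structure built with designated initializers. A minimal runnable sketch of that registration idiom; struct smumgr_funcs and its fields are stand-ins, not the real struct pp_smumgr_func layout.

#include <stdio.h>

/* Minimal stand-in for an ops table; the real structure has many
 * more hooks. */
struct smumgr_funcs {
    const char *name;
    int (*smu_init)(void);
    int (*smu_fini)(void);
};

static int my_init(void) { return 0; }
static int my_fini(void) { return 0; }

static const struct smumgr_funcs smu8_funcs = {
    .name     = "smu8_smu",
    .smu_init = my_init,
    .smu_fini = my_fini,
};

int main(void)
{
    printf("backend: %s\n", smu8_funcs.name);
    return smu8_funcs.smu_init() || smu8_funcs.smu_fini();
}
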