/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Author: Stanislaw Skowronek
 */

#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string_helpers.h>

#include <asm/unaligned.h>

#include <drm/drm_util.h>

#define ATOM_DEBUG

#include "atomfirmware.h"
#include "atom.h"
#include "atom-names.h"
#include "atom-bits.h"

#define ATOM_COND_ABOVE		0
#define ATOM_COND_ABOVEOREQUAL	1
#define ATOM_COND_ALWAYS	2
#define ATOM_COND_BELOW		3
#define ATOM_COND_BELOWOREQUAL	4
#define ATOM_COND_EQUAL		5
#define ATOM_COND_NOTEQUAL	6

#define ATOM_PORT_ATI		0
#define ATOM_PORT_PCI		1
#define ATOM_PORT_SYSIO		2

#define ATOM_UNIT_MICROSEC	0
#define ATOM_UNIT_MILLISEC	1

#define ATOM_CMD_TIMEOUT_SEC	20

typedef struct {
	struct atom_context *ctx;
	uint32_t *ps, *ws;
	int ps_shift;
	uint16_t start;
	unsigned last_jump;
	unsigned long last_jump_jiffies;
	bool abort;
} atom_exec_context;

int amdgpu_atom_debug;
static int amdgpu_atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t *params);
int amdgpu_atom_execute_table(struct atom_context *ctx, int index, uint32_t *params);

static uint32_t atom_arg_mask[8] =
	{ 0xFFFFFFFF, 0xFFFF, 0xFFFF00, 0xFFFF0000, 0xFF, 0xFF00, 0xFF0000,
	  0xFF000000 };
static int atom_arg_shift[8] = { 0, 0, 8, 16, 0, 8, 16, 24 };

static int atom_dst_to_src[8][4] = {
	/* translate destination alignment field to the source alignment encoding */
	{0, 0, 0, 0},
	{1, 2, 3, 0},
	{1, 2, 3, 0},
	{1, 2, 3, 0},
	{1, 3, 0, 0},
	{5, 6, 4, 4},
	{4, 7, 0, 0},
	{3, 0, 0, 0},
};

static int atom_def_dst[8] = { 0, 0, 1, 2, 0, 1, 2, 3 };

static int debug_depth;

#ifdef ATOM_DEBUG
static void debug_print_spaces(int n)
{
	while (n--)
		printk("   ");
}

#define DEBUG(...) do if (amdgpu_atom_debug) { printk(KERN_DEBUG __VA_ARGS__); } while (0)
#define SDEBUG(...) do if (amdgpu_atom_debug) { printk(KERN_DEBUG); debug_print_spaces(debug_depth); printk(__VA_ARGS__); } while (0)
#else
#define DEBUG(...) do { } while (0)
#define SDEBUG(...) do { } while (0)
#endif
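
/*
 * Run one indirect IO (IIO) program from the BIOS data table.  Each IIO
 * opcode either reads/writes a card register or shifts a bit-field of the
 * index, data or attribute value into the working register.
 */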
static uint32_t atom_iio_execute(struct atom_context *ctx, int base,
	uint32_t index, uint32_t data)
	uint32_t temp = 0xCDCDCDCD;
	temp = ctx->card->reg_read(ctx->card, CU16(base + 1));
	ctx->card->reg_write(ctx->card, CU16(base + 1), temp);
	~((0xFFFFFFFF >> (32 - CU8(base + 1))) <<
	(0xFFFFFFFF >> (32 - CU8(base + 1))) << CU8(base +
	case ATOM_IIO_MOVE_INDEX:
	~((0xFFFFFFFF >> (32 - CU8(base + 1))) <<
	((index >> CU8(base + 2)) &
	(0xFFFFFFFF >> (32 - CU8(base + 1)))) << CU8(base +
	case ATOM_IIO_MOVE_DATA:
	~((0xFFFFFFFF >> (32 - CU8(base + 1))) <<
	((data >> CU8(base + 2)) &
	(0xFFFFFFFF >> (32 - CU8(base + 1)))) << CU8(base +
	case ATOM_IIO_MOVE_ATTR:
	~((0xFFFFFFFF >> (32 - CU8(base + 1))) <<
	io_attr >> CU8(base + 2)) & (0xFFFFFFFF >> (32 -
	pr_info("Unknown IIO opcode\n");
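
/*
 * Fetch a source operand.  The attribute byte selects the argument type
 * (register, parameter space, workspace, framebuffer scratch, data table,
 * immediate, PLL or MC register) and the alignment selects which byte/word
 * lane of the 32-bit value is used.
 */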
static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr,
	int *ptr, uint32_t *saved, int print)
	uint32_t idx, val = 0xCDCDCDCD, align, arg;
	struct atom_context *gctx = ctx->ctx;
	align = (attr >> 3) & 7;
	DEBUG("REG[0x%04X]", idx);
	idx += gctx->reg_block;
	switch (gctx->io_mode) {
	val = gctx->card->reg_read(gctx->card, idx);
	pr_info("PCI registers are not implemented\n");
	pr_info("SYSIO registers are not implemented\n");
	if (!(gctx->io_mode & 0x80)) {
		pr_info("Bad IO mode\n");
	if (!gctx->iio[gctx->io_mode & 0x7F]) {
		pr_info("Undefined indirect IO read method %d\n",
			gctx->io_mode & 0x7F);
	atom_iio_execute(gctx,
			 gctx->iio[gctx->io_mode & 0x7F],
	/* get_unaligned_le32 avoids unaligned accesses from atombios
	 * tables, noticed on a DEC Alpha. */
	val = get_unaligned_le32((u32 *)&ctx->ps[idx]);
	DEBUG("PS[0x%02X,0x%04X]", idx, val);
	DEBUG("WS[0x%02X]", idx);
	case ATOM_WS_QUOTIENT:
		val = gctx->divmul[0];
	case ATOM_WS_REMAINDER:
		val = gctx->divmul[1];
	case ATOM_WS_DATAPTR:
		val = gctx->data_block;
	case ATOM_WS_OR_MASK:
		val = 1 << gctx->shift;
	case ATOM_WS_AND_MASK:
		val = ~(1 << gctx->shift);
	case ATOM_WS_FB_WINDOW:
	case ATOM_WS_ATTRIBUTES:
		val = gctx->reg_block;
	if (gctx->data_block)
		DEBUG("ID[0x%04X+%04X]", idx, gctx->data_block);
		DEBUG("ID[0x%04X]", idx);
	val = U32(idx + gctx->data_block);
	if ((gctx->fb_base + (idx * 4)) > gctx->scratch_size_bytes) {
		DRM_ERROR("ATOM: fb read beyond scratch region: %d vs. %d\n",
			  gctx->fb_base + (idx * 4), gctx->scratch_size_bytes);
	val = gctx->scratch[(gctx->fb_base / 4) + idx];
	DEBUG("FB[0x%02X]", idx);
	DEBUG("IMM 0x%08X\n", val);
	case ATOM_SRC_WORD16:
		DEBUG("IMM 0x%04X\n", val);
	case ATOM_SRC_BYTE16:
	case ATOM_SRC_BYTE24:
		DEBUG("IMM 0x%02X\n", val);
	DEBUG("PLL[0x%02X]", idx);
	val = gctx->card->pll_read(gctx->card, idx);
	DEBUG("MC[0x%02X]", idx);
	val = gctx->card->mc_read(gctx->card, idx);
	val &= atom_arg_mask[align];
	val >>= atom_arg_shift[align];
	DEBUG(".[31:0] -> 0x%08X\n", val);
	DEBUG(".[15:0] -> 0x%04X\n", val);
	DEBUG(".[23:8] -> 0x%04X\n", val);
	case ATOM_SRC_WORD16:
		DEBUG(".[31:16] -> 0x%04X\n", val);
	DEBUG(".[7:0] -> 0x%02X\n", val);
	DEBUG(".[15:8] -> 0x%02X\n", val);
	case ATOM_SRC_BYTE16:
		DEBUG(".[23:16] -> 0x%02X\n", val);
	case ATOM_SRC_BYTE24:
		DEBUG(".[31:24] -> 0x%02X\n", val);
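
/* Advance the instruction pointer past a source operand without evaluating it. */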
static void atom_skip_src_int(atom_exec_context *ctx, uint8_t attr, int *ptr)
	uint32_t align = (attr >> 3) & 7, arg = attr & 7;
	case ATOM_SRC_WORD16:
	case ATOM_SRC_BYTE16:
	case ATOM_SRC_BYTE24:

static uint32_t atom_get_src(atom_exec_context *ctx, uint8_t attr, int *ptr)
{
	return atom_get_src_int(ctx, attr, ptr, NULL, 1);
}

static uint32_t atom_get_src_direct(atom_exec_context *ctx, uint8_t align, int *ptr)
	uint32_t val = 0xCDCDCDCD;
	case ATOM_SRC_WORD16:
	case ATOM_SRC_BYTE16:
	case ATOM_SRC_BYTE24:

static uint32_t atom_get_dst(atom_exec_context *ctx, int arg, uint8_t attr,
			     int *ptr, uint32_t *saved, int print)
{
	return atom_get_src_int(ctx,
		arg | atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3] << 3,
		ptr, saved, print);
}

static void atom_skip_dst(atom_exec_context *ctx, int arg, uint8_t attr, int *ptr)
{
	atom_skip_src_int(ctx,
		arg | atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3] << 3,
		ptr);
}
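
/*
 * Write a destination operand: the new value is shifted/masked into the
 * lane selected by the alignment and merged with the saved original so the
 * untouched bits are preserved.
 */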
static void atom_put_dst(atom_exec_context *ctx, int arg, uint8_t attr,
	int *ptr, uint32_t val, uint32_t saved)
	atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3], old_val =
	struct atom_context *gctx = ctx->ctx;
	old_val &= atom_arg_mask[align] >> atom_arg_shift[align];
	val <<= atom_arg_shift[align];
	val &= atom_arg_mask[align];
	saved &= ~atom_arg_mask[align];
	DEBUG("REG[0x%04X]", idx);
	idx += gctx->reg_block;
	switch (gctx->io_mode) {
	gctx->card->reg_write(gctx->card, idx,
	gctx->card->reg_write(gctx->card, idx, val);
	pr_info("PCI registers are not implemented\n");
	pr_info("SYSIO registers are not implemented\n");
	if (!(gctx->io_mode & 0x80)) {
		pr_info("Bad IO mode\n");
	if (!gctx->iio[gctx->io_mode & 0xFF]) {
		pr_info("Undefined indirect IO write method %d\n",
			gctx->io_mode & 0x7F);
	atom_iio_execute(gctx, gctx->iio[gctx->io_mode & 0xFF],
	DEBUG("PS[0x%02X]", idx);
	ctx->ps[idx] = cpu_to_le32(val);
	DEBUG("WS[0x%02X]", idx);
	case ATOM_WS_QUOTIENT:
		gctx->divmul[0] = val;
	case ATOM_WS_REMAINDER:
		gctx->divmul[1] = val;
	case ATOM_WS_DATAPTR:
		gctx->data_block = val;
	case ATOM_WS_OR_MASK:
	case ATOM_WS_AND_MASK:
	case ATOM_WS_FB_WINDOW:
	case ATOM_WS_ATTRIBUTES:
		gctx->reg_block = val;
	if ((gctx->fb_base + (idx * 4)) > gctx->scratch_size_bytes) {
		DRM_ERROR("ATOM: fb write beyond scratch region: %d vs. %d\n",
			  gctx->fb_base + (idx * 4), gctx->scratch_size_bytes);
	gctx->scratch[(gctx->fb_base / 4) + idx] = val;
	DEBUG("FB[0x%02X]", idx);
	DEBUG("PLL[0x%02X]", idx);
	gctx->card->pll_write(gctx->card, idx, val);
	DEBUG("MC[0x%02X]", idx);
	gctx->card->mc_write(gctx->card, idx, val);
	DEBUG(".[31:0] <- 0x%08X\n", old_val);
	DEBUG(".[15:0] <- 0x%04X\n", old_val);
	DEBUG(".[23:8] <- 0x%04X\n", old_val);
	case ATOM_SRC_WORD16:
		DEBUG(".[31:16] <- 0x%04X\n", old_val);
	DEBUG(".[7:0] <- 0x%02X\n", old_val);
	DEBUG(".[15:8] <- 0x%02X\n", old_val);
	case ATOM_SRC_BYTE16:
		DEBUG(".[23:16] <- 0x%02X\n", old_val);
	case ATOM_SRC_BYTE24:
		DEBUG(".[31:24] <- 0x%02X\n", old_val);

static void atom_op_add(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	uint32_t dst, src, saved;
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	src = atom_get_src(ctx, attr, ptr);
	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);

static void atom_op_and(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	uint32_t dst, src, saved;
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	src = atom_get_src(ctx, attr, ptr);
	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);

static void atom_op_beep(atom_exec_context *ctx, int *ptr, int arg)
	printk("ATOM BIOS beeped!\n");

static void atom_op_calltable(atom_exec_context *ctx, int *ptr, int arg)
	int idx = U8((*ptr)++);
	if (idx < ATOM_TABLE_NAMES_CNT)
		SDEBUG(" table: %d (%s)\n", idx, atom_table_names[idx]);
		SDEBUG(" table: %d\n", idx);
	if (U16(ctx->ctx->cmd_table + 4 + 2 * idx))
		r = amdgpu_atom_execute_table_locked(ctx->ctx, idx, ctx->ps + ctx->ps_shift);

static void atom_op_clear(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	attr |= atom_def_dst[attr >> 3] << 6;
	atom_get_dst(ctx, arg, attr, ptr, &saved, 0);
	atom_put_dst(ctx, arg, attr, &dptr, 0, saved);

static void atom_op_compare(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
	src = atom_get_src(ctx, attr, ptr);
	ctx->ctx->cs_equal = (dst == src);
	ctx->ctx->cs_above = (dst > src);
	SDEBUG(" result: %s %s\n", ctx->ctx->cs_equal ? "EQ" : "NE",
	       ctx->ctx->cs_above ? "GT" : "LE");

static void atom_op_delay(atom_exec_context *ctx, int *ptr, int arg)
	unsigned count = U8((*ptr)++);
	SDEBUG(" count: %d\n", count);
	if (arg == ATOM_UNIT_MICROSEC)
	else if (!drm_can_sleep())

static void atom_op_div(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
	src = atom_get_src(ctx, attr, ptr);
	ctx->ctx->divmul[0] = dst / src;
	ctx->ctx->divmul[1] = dst % src;
	ctx->ctx->divmul[0] = 0;
	ctx->ctx->divmul[1] = 0;

static void atom_op_div32(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
	src = atom_get_src(ctx, attr, ptr);
	val64 |= ((uint64_t)ctx->ctx->divmul[1]) << 32;
	ctx->ctx->divmul[0] = lower_32_bits(val64);
	ctx->ctx->divmul[1] = upper_32_bits(val64);
	ctx->ctx->divmul[0] = 0;
	ctx->ctx->divmul[1] = 0;

static void atom_op_eot(atom_exec_context *ctx, int *ptr, int arg)
	/* functionally, a nop */
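
/*
 * Conditional jump.  Repeated jumps to the same target are timed with
 * jiffies so that a table stuck in a busy loop is reported and aborted
 * after ATOM_CMD_TIMEOUT_SEC seconds.
 */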
static void atom_op_jump(atom_exec_context *ctx, int *ptr, int arg)
	int execute = 0, target = U16(*ptr);
	unsigned long cjiffies;
	case ATOM_COND_ABOVE:
		execute = ctx->ctx->cs_above;
	case ATOM_COND_ABOVEOREQUAL:
		execute = ctx->ctx->cs_above || ctx->ctx->cs_equal;
	case ATOM_COND_ALWAYS:
	case ATOM_COND_BELOW:
		execute = !(ctx->ctx->cs_above || ctx->ctx->cs_equal);
	case ATOM_COND_BELOWOREQUAL:
		execute = !ctx->ctx->cs_above;
	case ATOM_COND_EQUAL:
		execute = ctx->ctx->cs_equal;
	case ATOM_COND_NOTEQUAL:
		execute = !ctx->ctx->cs_equal;
	if (arg != ATOM_COND_ALWAYS)
		SDEBUG(" taken: %s\n", str_yes_no(execute));
	SDEBUG(" target: 0x%04X\n", target);
	if (ctx->last_jump == (ctx->start + target)) {
		if (time_after(cjiffies, ctx->last_jump_jiffies)) {
			cjiffies -= ctx->last_jump_jiffies;
			if ((jiffies_to_msecs(cjiffies) > ATOM_CMD_TIMEOUT_SEC*1000)) {
				DRM_ERROR("atombios stuck in loop for more than %dsecs aborting\n",
					  ATOM_CMD_TIMEOUT_SEC);
			/* jiffies wrapped around; just wait a little longer */
			ctx->last_jump_jiffies = jiffies;
		ctx->last_jump = ctx->start + target;
		ctx->last_jump_jiffies = jiffies;
	*ptr = ctx->start + target;

static void atom_op_mask(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	uint32_t dst, mask, src, saved;
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	mask = atom_get_src_direct(ctx, ((attr >> 3) & 7), ptr);
	SDEBUG(" mask: 0x%08x", mask);
	src = atom_get_src(ctx, attr, ptr);
	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);

static void atom_op_move(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	if (((attr >> 3) & 7) != ATOM_SRC_DWORD)
		atom_get_dst(ctx, arg, attr, ptr, &saved, 0);
		atom_skip_dst(ctx, arg, attr, ptr);
	src = atom_get_src(ctx, attr, ptr);
	atom_put_dst(ctx, arg, attr, &dptr, src, saved);

static void atom_op_mul(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
	src = atom_get_src(ctx, attr, ptr);
	ctx->ctx->divmul[0] = dst * src;

static void atom_op_mul32(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
	src = atom_get_src(ctx, attr, ptr);
	val64 = (uint64_t)dst * (uint64_t)src;
	ctx->ctx->divmul[0] = lower_32_bits(val64);
	ctx->ctx->divmul[1] = upper_32_bits(val64);

static void atom_op_nop(atom_exec_context *ctx, int *ptr, int arg)

static void atom_op_or(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	uint32_t dst, src, saved;
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	src = atom_get_src(ctx, attr, ptr);
	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);

static void atom_op_postcard(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t val = U8((*ptr)++);
	SDEBUG("POST card output: 0x%02X\n", val);

static void atom_op_repeat(atom_exec_context *ctx, int *ptr, int arg)
	pr_info("unimplemented!\n");

static void atom_op_restorereg(atom_exec_context *ctx, int *ptr, int arg)
	pr_info("unimplemented!\n");

static void atom_op_savereg(atom_exec_context *ctx, int *ptr, int arg)
	pr_info("unimplemented!\n");

static void atom_op_setdatablock(atom_exec_context *ctx, int *ptr, int arg)
	SDEBUG(" block: %d\n", idx);
	ctx->ctx->data_block = 0;
	ctx->ctx->data_block = ctx->start;
	ctx->ctx->data_block = U16(ctx->ctx->data_table + 4 + 2 * idx);
	SDEBUG(" base: 0x%04X\n", ctx->ctx->data_block);

static void atom_op_setfbbase(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	SDEBUG(" fb_base: ");
	ctx->ctx->fb_base = atom_get_src(ctx, attr, ptr);

static void atom_op_setport(atom_exec_context *ctx, int *ptr, int arg)
	if (port < ATOM_IO_NAMES_CNT)
		SDEBUG(" port: %d (%s)\n", port, atom_io_names[port]);
		SDEBUG(" port: %d\n", port);
	ctx->ctx->io_mode = ATOM_IO_MM;
	ctx->ctx->io_mode = ATOM_IO_IIO | port;
	ctx->ctx->io_mode = ATOM_IO_PCI;
	case ATOM_PORT_SYSIO:
		ctx->ctx->io_mode = ATOM_IO_SYSIO;

static void atom_op_setregblock(atom_exec_context *ctx, int *ptr, int arg)
	ctx->ctx->reg_block = U16(*ptr);
	SDEBUG(" base: 0x%04X\n", ctx->ctx->reg_block);

static void atom_op_shift_left(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++), shift;
	attr |= atom_def_dst[attr >> 3] << 6;
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	shift = atom_get_src_direct(ctx, ATOM_SRC_BYTE0, ptr);
	SDEBUG(" shift: %d\n", shift);
	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);

static void atom_op_shift_right(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++), shift;
	attr |= atom_def_dst[attr >> 3] << 6;
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	shift = atom_get_src_direct(ctx, ATOM_SRC_BYTE0, ptr);
	SDEBUG(" shift: %d\n", shift);
	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);

static void atom_op_shl(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++), shift;
	uint32_t dst_align = atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3];
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	/* op needs the full dst value */
	shift = atom_get_src(ctx, attr, ptr);
	SDEBUG(" shift: %d\n", shift);
	dst &= atom_arg_mask[dst_align];
	dst >>= atom_arg_shift[dst_align];
	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);

static void atom_op_shr(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++), shift;
	uint32_t dst_align = atom_dst_to_src[(attr >> 3) & 7][(attr >> 6) & 3];
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	/* op needs the full dst value */
	shift = atom_get_src(ctx, attr, ptr);
	SDEBUG(" shift: %d\n", shift);
	dst &= atom_arg_mask[dst_align];
	dst >>= atom_arg_shift[dst_align];
	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);

static void atom_op_sub(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	uint32_t dst, src, saved;
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	src = atom_get_src(ctx, attr, ptr);
	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
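
/*
 * SWITCH: scan the CASE records that follow (magic byte, immediate value,
 * 16-bit jump target) until one matches the source value or ATOM_CASE_END
 * is reached.
 */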
static void atom_op_switch(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	uint32_t src, val, target;
	SDEBUG(" switch: ");
	src = atom_get_src(ctx, attr, ptr);
	while (U16(*ptr) != ATOM_CASE_END)
		if (U8(*ptr) == ATOM_CASE_MAGIC) {
			atom_get_src(ctx, (attr & 0x38) | ATOM_ARG_IMM,
			SDEBUG(" target: %04X\n", target);
			*ptr = ctx->start + target;
	pr_info("Bad case\n");

static void atom_op_test(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
	src = atom_get_src(ctx, attr, ptr);
	ctx->ctx->cs_equal = ((dst & src) == 0);
	SDEBUG(" result: %s\n", ctx->ctx->cs_equal ? "EQ" : "NE");

static void atom_op_xor(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t attr = U8((*ptr)++);
	uint32_t dst, src, saved;
	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	src = atom_get_src(ctx, attr, ptr);
	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);

static void atom_op_debug(atom_exec_context *ctx, int *ptr, int arg)
	uint8_t val = U8((*ptr)++);
	SDEBUG("DEBUG output: 0x%02X\n", val);

static void atom_op_processds(atom_exec_context *ctx, int *ptr, int arg)
	uint16_t val = U16(*ptr);
	SDEBUG("PROCESSDS output: 0x%02X\n", val);
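
/*
 * Opcode dispatch table, indexed by ATOM opcode number: each entry names
 * the handler and the argument/addressing mode it is invoked with.
 */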
static struct {
	void (*func) (atom_exec_context *, int *, int);
	int arg;
} opcode_table[ATOM_OP_CNT] = {
	{ NULL, 0 }, {
	atom_op_move, ATOM_ARG_REG}, {
	atom_op_move, ATOM_ARG_PS}, {
	atom_op_move, ATOM_ARG_WS}, {
	atom_op_move, ATOM_ARG_FB}, {
	atom_op_move, ATOM_ARG_PLL}, {
	atom_op_move, ATOM_ARG_MC}, {
	atom_op_and, ATOM_ARG_REG}, {
	atom_op_and, ATOM_ARG_PS}, {
	atom_op_and, ATOM_ARG_WS}, {
	atom_op_and, ATOM_ARG_FB}, {
	atom_op_and, ATOM_ARG_PLL}, {
	atom_op_and, ATOM_ARG_MC}, {
	atom_op_or, ATOM_ARG_REG}, {
	atom_op_or, ATOM_ARG_PS}, {
	atom_op_or, ATOM_ARG_WS}, {
	atom_op_or, ATOM_ARG_FB}, {
	atom_op_or, ATOM_ARG_PLL}, {
	atom_op_or, ATOM_ARG_MC}, {
	atom_op_shift_left, ATOM_ARG_REG}, {
	atom_op_shift_left, ATOM_ARG_PS}, {
	atom_op_shift_left, ATOM_ARG_WS}, {
	atom_op_shift_left, ATOM_ARG_FB}, {
	atom_op_shift_left, ATOM_ARG_PLL}, {
	atom_op_shift_left, ATOM_ARG_MC}, {
	atom_op_shift_right, ATOM_ARG_REG}, {
	atom_op_shift_right, ATOM_ARG_PS}, {
	atom_op_shift_right, ATOM_ARG_WS}, {
	atom_op_shift_right, ATOM_ARG_FB}, {
	atom_op_shift_right, ATOM_ARG_PLL}, {
	atom_op_shift_right, ATOM_ARG_MC}, {
	atom_op_mul, ATOM_ARG_REG}, {
	atom_op_mul, ATOM_ARG_PS}, {
	atom_op_mul, ATOM_ARG_WS}, {
	atom_op_mul, ATOM_ARG_FB}, {
	atom_op_mul, ATOM_ARG_PLL}, {
	atom_op_mul, ATOM_ARG_MC}, {
	atom_op_div, ATOM_ARG_REG}, {
	atom_op_div, ATOM_ARG_PS}, {
	atom_op_div, ATOM_ARG_WS}, {
	atom_op_div, ATOM_ARG_FB}, {
	atom_op_div, ATOM_ARG_PLL}, {
	atom_op_div, ATOM_ARG_MC}, {
	atom_op_add, ATOM_ARG_REG}, {
	atom_op_add, ATOM_ARG_PS}, {
	atom_op_add, ATOM_ARG_WS}, {
	atom_op_add, ATOM_ARG_FB}, {
	atom_op_add, ATOM_ARG_PLL}, {
	atom_op_add, ATOM_ARG_MC}, {
	atom_op_sub, ATOM_ARG_REG}, {
	atom_op_sub, ATOM_ARG_PS}, {
	atom_op_sub, ATOM_ARG_WS}, {
	atom_op_sub, ATOM_ARG_FB}, {
	atom_op_sub, ATOM_ARG_PLL}, {
	atom_op_sub, ATOM_ARG_MC}, {
	atom_op_setport, ATOM_PORT_ATI}, {
	atom_op_setport, ATOM_PORT_PCI}, {
	atom_op_setport, ATOM_PORT_SYSIO}, {
	atom_op_setregblock, 0}, {
	atom_op_setfbbase, 0}, {
	atom_op_compare, ATOM_ARG_REG}, {
	atom_op_compare, ATOM_ARG_PS}, {
	atom_op_compare, ATOM_ARG_WS}, {
	atom_op_compare, ATOM_ARG_FB}, {
	atom_op_compare, ATOM_ARG_PLL}, {
	atom_op_compare, ATOM_ARG_MC}, {
	atom_op_switch, 0}, {
	atom_op_jump, ATOM_COND_ALWAYS}, {
	atom_op_jump, ATOM_COND_EQUAL}, {
	atom_op_jump, ATOM_COND_BELOW}, {
	atom_op_jump, ATOM_COND_ABOVE}, {
	atom_op_jump, ATOM_COND_BELOWOREQUAL}, {
	atom_op_jump, ATOM_COND_ABOVEOREQUAL}, {
	atom_op_jump, ATOM_COND_NOTEQUAL}, {
	atom_op_test, ATOM_ARG_REG}, {
	atom_op_test, ATOM_ARG_PS}, {
	atom_op_test, ATOM_ARG_WS}, {
	atom_op_test, ATOM_ARG_FB}, {
	atom_op_test, ATOM_ARG_PLL}, {
	atom_op_test, ATOM_ARG_MC}, {
	atom_op_delay, ATOM_UNIT_MILLISEC}, {
	atom_op_delay, ATOM_UNIT_MICROSEC}, {
	atom_op_calltable, 0}, {
	atom_op_repeat, 0}, {
	atom_op_clear, ATOM_ARG_REG}, {
	atom_op_clear, ATOM_ARG_PS}, {
	atom_op_clear, ATOM_ARG_WS}, {
	atom_op_clear, ATOM_ARG_FB}, {
	atom_op_clear, ATOM_ARG_PLL}, {
	atom_op_clear, ATOM_ARG_MC}, {
	atom_op_nop, 0}, {
	atom_op_eot, 0}, {
	atom_op_mask, ATOM_ARG_REG}, {
	atom_op_mask, ATOM_ARG_PS}, {
	atom_op_mask, ATOM_ARG_WS}, {
	atom_op_mask, ATOM_ARG_FB}, {
	atom_op_mask, ATOM_ARG_PLL}, {
	atom_op_mask, ATOM_ARG_MC}, {
	atom_op_postcard, 0}, {
	atom_op_beep, 0}, {
	atom_op_savereg, 0}, {
	atom_op_restorereg, 0}, {
	atom_op_setdatablock, 0}, {
	atom_op_xor, ATOM_ARG_REG}, {
	atom_op_xor, ATOM_ARG_PS}, {
	atom_op_xor, ATOM_ARG_WS}, {
	atom_op_xor, ATOM_ARG_FB}, {
	atom_op_xor, ATOM_ARG_PLL}, {
	atom_op_xor, ATOM_ARG_MC}, {
	atom_op_shl, ATOM_ARG_REG}, {
	atom_op_shl, ATOM_ARG_PS}, {
	atom_op_shl, ATOM_ARG_WS}, {
	atom_op_shl, ATOM_ARG_FB}, {
	atom_op_shl, ATOM_ARG_PLL}, {
	atom_op_shl, ATOM_ARG_MC}, {
	atom_op_shr, ATOM_ARG_REG}, {
	atom_op_shr, ATOM_ARG_PS}, {
	atom_op_shr, ATOM_ARG_WS}, {
	atom_op_shr, ATOM_ARG_FB}, {
	atom_op_shr, ATOM_ARG_PLL}, {
	atom_op_shr, ATOM_ARG_MC}, {
	atom_op_debug, 0}, {
	atom_op_processds, 0}, {
	atom_op_mul32, ATOM_ARG_PS}, {
	atom_op_mul32, ATOM_ARG_WS}, {
	atom_op_div32, ATOM_ARG_PS}, {
	atom_op_div32, ATOM_ARG_WS},
};
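
/*
 * Fetch a command table's header (length, workspace and parameter-space
 * sizes), then decode and dispatch opcodes until end-of-table or until the
 * interpreter decides the table is stuck.
 */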
static int amdgpu_atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t *params)
	int base = CU16(ctx->cmd_table + 4 + 2 * index);
	int len, ws, ps, ptr;
	atom_exec_context ectx;
	len = CU16(base + ATOM_CT_SIZE_PTR);
	ws = CU8(base + ATOM_CT_WS_PTR);
	ps = CU8(base + ATOM_CT_PS_PTR) & ATOM_CT_PS_MASK;
	ptr = base + ATOM_CT_CODE_PTR;
	SDEBUG(">> execute %04X (len %d, WS %d, PS %d)\n", base, len, ws, ps);
	ectx.ps_shift = ps / 4;
	ectx.ws = kcalloc(4, ws, GFP_KERNEL);
	if (op < ATOM_OP_NAMES_CNT)
		SDEBUG("%s @ 0x%04X\n", atom_op_names[op], ptr - 1);
		SDEBUG("[%d] @ 0x%04X\n", op, ptr - 1);
	DRM_ERROR("atombios stuck executing %04X (len %d, WS %d, PS %d) @ 0x%04X\n",
		  base, len, ws, ps, ptr - 1);
	if (op < ATOM_OP_CNT && op > 0)
		opcode_table[op].func(&ectx, &ptr,
				      opcode_table[op].arg);
	if (op == ATOM_OP_EOT)
		break;
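
/*
 * Public entry point: serializes table execution with ctx->mutex and resets
 * the per-run interpreter state (data block, register block, fb window,
 * io mode) before dispatching to the locked worker.
 *
 * Minimal usage sketch (the parameter block layout depends on the table
 * being called):
 *
 *	uint32_t ps[16] = { 0 };
 *	amdgpu_atom_execute_table(ctx, index, ps);
 *
 * The parameter space doubles as the output buffer for tables that return
 * data.
 */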
int amdgpu_atom_execute_table(struct atom_context *ctx, int index, uint32_t *params)
	mutex_lock(&ctx->mutex);
	/* reset data block */
	ctx->data_block = 0;
	/* reset reg block */
	ctx->reg_block = 0;
	/* reset fb window */
	ctx->fb_base = 0;
	ctx->io_mode = ATOM_IO_MM;
	r = amdgpu_atom_execute_table_locked(ctx, index, params);
	mutex_unlock(&ctx->mutex);

static int atom_iio_len[] = { 1, 2, 3, 3, 3, 3, 4, 4, 4, 3 };
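
/*
 * Build a lookup table of indirect IO programs: scan the IIO block in the
 * data table and record the start offset of each program, keyed by its
 * port id.
 */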
static void atom_index_iio(struct atom_context *ctx, int base)
	ctx->iio = kzalloc(2 * 256, GFP_KERNEL);
	while (CU8(base) == ATOM_IIO_START) {
		ctx->iio[CU8(base + 1)] = base + 2;
		while (CU8(base) != ATOM_IIO_END)
			base += atom_iio_len[CU8(base)];
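
/* Copy the VBIOS name string out of the ROM image into ctx->name, trimming trailing spaces. */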
static void atom_get_vbios_name(struct atom_context *ctx)
	unsigned char *p_rom;
	unsigned char str_num;
	unsigned short off_to_vbios_str;
	unsigned char *c_ptr;
	const char *na = "--N/A--";
	str_num = *(p_rom + OFFSET_TO_GET_ATOMBIOS_NUMBER_OF_STRINGS);
		*(unsigned short *)(p_rom + OFFSET_TO_GET_ATOMBIOS_STRING_START);
	c_ptr = (unsigned char *)(p_rom + off_to_vbios_str);
	/* do not know where to find name */
	memcpy(ctx->name, na, 7);
	/*
	 * skip the atombios strings, usually 4
	 * 1st is P/N, 2nd is ASIC, 3rd is PCI type, 4th is Memory type
	 */
	for (i = 0; i < str_num; i++) {
	/* skip the following 2 chars: 0x0D 0x0A */
	name_size = strnlen(c_ptr, STRLEN_LONG - 1);
	memcpy(ctx->name, c_ptr, name_size);
	back = ctx->name + name_size;
	while ((*--back) == ' ')

static void atom_get_vbios_date(struct atom_context *ctx)
	unsigned char *p_rom;
	unsigned char *date_in_rom;
	date_in_rom = p_rom + OFFSET_TO_VBIOS_DATE;
	ctx->date[2] = date_in_rom[6];
	ctx->date[3] = date_in_rom[7];
	ctx->date[5] = date_in_rom[0];
	ctx->date[6] = date_in_rom[1];
	ctx->date[8] = date_in_rom[3];
	ctx->date[9] = date_in_rom[4];
	ctx->date[10] = ' ';
	ctx->date[11] = date_in_rom[9];
	ctx->date[12] = date_in_rom[10];
	ctx->date[13] = date_in_rom[11];
	ctx->date[14] = date_in_rom[12];
	ctx->date[15] = date_in_rom[13];
	ctx->date[16] = '\0';
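
/*
 * Naive substring search over the ROM image between the given start and
 * end offsets; returns a pointer to the first match (NULL if not found).
 */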
static unsigned char *atom_find_str_in_rom(struct atom_context *ctx, char *str, int start,
	int end, int maxlen)
	unsigned long str_off;
	unsigned char *p_rom;
	unsigned short str_len;
	str_len = strnlen(str, maxlen);
	for (; start <= end; ++start) {
		for (str_off = 0; str_off < str_len; ++str_off) {
			if (str[str_off] != *(p_rom + start + str_off))
		if (str_off == str_len || str[str_off] == 0)
			return p_rom + start;

static void atom_get_vbios_pn(struct atom_context *ctx)
	unsigned char *p_rom;
	unsigned short off_to_vbios_str;
	unsigned char *vbios_str;
	off_to_vbios_str = 0;
	if (*(p_rom + OFFSET_TO_GET_ATOMBIOS_NUMBER_OF_STRINGS) != 0) {
		*(unsigned short *)(p_rom + OFFSET_TO_GET_ATOMBIOS_STRING_START);
		vbios_str = (unsigned char *)(p_rom + off_to_vbios_str);
		vbios_str = p_rom + OFFSET_TO_VBIOS_PART_NUMBER;
	if (*vbios_str == 0) {
		vbios_str = atom_find_str_in_rom(ctx, BIOS_ATOM_PREFIX, 3, 1024, 64);
		if (vbios_str == NULL)
			vbios_str += sizeof(BIOS_ATOM_PREFIX) - 1;
	if (vbios_str != NULL && *vbios_str == 0)
	if (vbios_str != NULL) {
		while ((count < BIOS_STRING_LENGTH) && vbios_str[count] >= ' ' &&
		       vbios_str[count] <= 'z') {
			ctx->vbios_pn[count] = vbios_str[count];
		ctx->vbios_pn[count] = 0;
	pr_info("ATOM BIOS: %s\n", ctx->vbios_pn);

static void atom_get_vbios_version(struct atom_context *ctx)
	unsigned short start = 3, end;
	unsigned char *vbios_ver;
	unsigned char *p_rom;
	/* Search from strings offset if it's present */
	start = *(unsigned short *)(p_rom +
				    OFFSET_TO_GET_ATOMBIOS_STRING_START);
	/* Search till atom rom header start point */
	end = *(unsigned short *)(p_rom + OFFSET_TO_ATOM_ROM_HEADER_POINTER);
	/* Use hardcoded offsets, if the offsets are not populated */
	/* find anchor ATOMBIOSBK-AMD */
		atom_find_str_in_rom(ctx, BIOS_VERSION_PREFIX, start, end, 64);
	if (vbios_ver != NULL) {
		/* skip ATOMBIOSBK-AMD VER */
		memcpy(ctx->vbios_ver_str, vbios_ver, STRLEN_NORMAL);
		ctx->vbios_ver_str[0] = '\0';
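
/*
 * Parse and validate the VBIOS image: check the BIOS, ATI and ATOM magic
 * values, cache the command/data table offsets, index the IIO programs and
 * pull the firmware revision plus the name/part-number/date/version strings.
 */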
struct atom_context *amdgpu_atom_parse(struct card_info *card, void *bios)
	struct atom_context *ctx =
	    kzalloc(sizeof(struct atom_context), GFP_KERNEL);
	struct _ATOM_ROM_HEADER *atom_rom_header;
	struct _ATOM_MASTER_DATA_TABLE *master_table;
	struct _ATOM_FIRMWARE_INFO *atom_fw_info;
	if (CU16(0) != ATOM_BIOS_MAGIC) {
		pr_info("Invalid BIOS magic\n");
	    (CSTR(ATOM_ATI_MAGIC_PTR), ATOM_ATI_MAGIC,
	     strlen(ATOM_ATI_MAGIC))) {
		pr_info("Invalid ATI magic\n");
	base = CU16(ATOM_ROM_TABLE_PTR);
	    (CSTR(base + ATOM_ROM_MAGIC_PTR), ATOM_ROM_MAGIC,
	     strlen(ATOM_ROM_MAGIC))) {
		pr_info("Invalid ATOM magic\n");
	ctx->cmd_table = CU16(base + ATOM_ROM_CMD_PTR);
	ctx->data_table = CU16(base + ATOM_ROM_DATA_PTR);
	atom_index_iio(ctx, CU16(ctx->data_table + ATOM_DATA_IIO_PTR) + 4);
		amdgpu_atom_destroy(ctx);
	atom_rom_header = (struct _ATOM_ROM_HEADER *)CSTR(base);
	if (atom_rom_header->usMasterDataTableOffset != 0) {
		master_table = (struct _ATOM_MASTER_DATA_TABLE *)
		    CSTR(atom_rom_header->usMasterDataTableOffset);
		if (master_table->ListOfDataTables.FirmwareInfo != 0) {
			atom_fw_info = (struct _ATOM_FIRMWARE_INFO *)
			    CSTR(master_table->ListOfDataTables.FirmwareInfo);
			ctx->version = atom_fw_info->ulFirmwareRevision;
	atom_get_vbios_name(ctx);
	atom_get_vbios_pn(ctx);
	atom_get_vbios_date(ctx);
	atom_get_vbios_version(ctx);
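
/*
 * Run the ASIC_INIT command table, passing the default SCLK/MCLK from the
 * firmware info table as parameters.
 */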
int amdgpu_atom_asic_init(struct atom_context *ctx)
	int hwi = CU16(ctx->data_table + ATOM_DATA_FWI_PTR);
	ps[0] = cpu_to_le32(CU32(hwi + ATOM_FWI_DEFSCLK_PTR));
	ps[1] = cpu_to_le32(CU32(hwi + ATOM_FWI_DEFMCLK_PTR));
	if (!ps[0] || !ps[1])
	if (!CU16(ctx->cmd_table + 4 + 2 * ATOM_CMD_INIT))
	ret = amdgpu_atom_execute_table(ctx, ATOM_CMD_INIT, ps);

void amdgpu_atom_destroy(struct atom_context *ctx)

bool amdgpu_atom_parse_data_header(struct atom_context *ctx, int index,
	uint16_t *size, uint8_t *frev, uint8_t *crev,
	uint16_t *data_start)
	int offset = index * 2 + 4;
	int idx = CU16(ctx->data_table + offset);
	u16 *mdt = (u16 *)(ctx->bios + ctx->data_table + 4);
	*frev = CU8(idx + 2);
	*crev = CU8(idx + 3);

bool amdgpu_atom_parse_cmd_header(struct atom_context *ctx, int index, uint8_t *frev,
	int offset = index * 2 + 4;
	int idx = CU16(ctx->cmd_table + offset);
	u16 *mct = (u16 *)(ctx->bios + ctx->cmd_table + 4);
	*frev = CU8(idx + 2);
	*crev = CU8(idx + 3);