/*
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "drmP.h"
#include "drm.h"
#include "nouveau_drm.h"
#include "nouveau_drv.h"
#include "nouveau_util.h"

struct nv10_graph_engine {
	struct nouveau_exec_engine base;
};

struct pipe_state {
	uint32_t pipe_0x0000[0x040/4];
	uint32_t pipe_0x0040[0x010/4];
	uint32_t pipe_0x0200[0x0c0/4];
	uint32_t pipe_0x4400[0x080/4];
	uint32_t pipe_0x6400[0x3b0/4];
	uint32_t pipe_0x6800[0x2f0/4];
	uint32_t pipe_0x6c00[0x030/4];
	uint32_t pipe_0x7000[0x130/4];
	uint32_t pipe_0x7400[0x0c0/4];
	uint32_t pipe_0x7800[0x0c0/4];
};
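
/* PGRAPH MMIO registers that make up a channel's software context image;
 * nv10_graph_unload_context() reads them out and nv10_graph_load_context()
 * writes them back.
 */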
static int nv10_graph_ctx_regs[] = {
	NV10_PGRAPH_CTX_SWITCH(0),
	NV10_PGRAPH_CTX_SWITCH(1),
	NV10_PGRAPH_CTX_SWITCH(2),
	NV10_PGRAPH_CTX_SWITCH(3),
	NV10_PGRAPH_CTX_SWITCH(4),
	NV10_PGRAPH_CTX_CACHE(0, 0),
	NV10_PGRAPH_CTX_CACHE(0, 1),
	NV10_PGRAPH_CTX_CACHE(0, 2),
	NV10_PGRAPH_CTX_CACHE(0, 3),
	NV10_PGRAPH_CTX_CACHE(0, 4),
	NV10_PGRAPH_CTX_CACHE(1, 0),
	NV10_PGRAPH_CTX_CACHE(1, 1),
	NV10_PGRAPH_CTX_CACHE(1, 2),
	NV10_PGRAPH_CTX_CACHE(1, 3),
	NV10_PGRAPH_CTX_CACHE(1, 4),
	NV10_PGRAPH_CTX_CACHE(2, 0),
	NV10_PGRAPH_CTX_CACHE(2, 1),
	NV10_PGRAPH_CTX_CACHE(2, 2),
	NV10_PGRAPH_CTX_CACHE(2, 3),
	NV10_PGRAPH_CTX_CACHE(2, 4),
	NV10_PGRAPH_CTX_CACHE(3, 0),
	NV10_PGRAPH_CTX_CACHE(3, 1),
	NV10_PGRAPH_CTX_CACHE(3, 2),
	NV10_PGRAPH_CTX_CACHE(3, 3),
	NV10_PGRAPH_CTX_CACHE(3, 4),
	NV10_PGRAPH_CTX_CACHE(4, 0),
	NV10_PGRAPH_CTX_CACHE(4, 1),
	NV10_PGRAPH_CTX_CACHE(4, 2),
	NV10_PGRAPH_CTX_CACHE(4, 3),
	NV10_PGRAPH_CTX_CACHE(4, 4),
	NV10_PGRAPH_CTX_CACHE(5, 0),
	NV10_PGRAPH_CTX_CACHE(5, 1),
	NV10_PGRAPH_CTX_CACHE(5, 2),
	NV10_PGRAPH_CTX_CACHE(5, 3),
	NV10_PGRAPH_CTX_CACHE(5, 4),
	NV10_PGRAPH_CTX_CACHE(6, 0),
	NV10_PGRAPH_CTX_CACHE(6, 1),
	NV10_PGRAPH_CTX_CACHE(6, 2),
	NV10_PGRAPH_CTX_CACHE(6, 3),
	NV10_PGRAPH_CTX_CACHE(6, 4),
	NV10_PGRAPH_CTX_CACHE(7, 0),
	NV10_PGRAPH_CTX_CACHE(7, 1),
	NV10_PGRAPH_CTX_CACHE(7, 2),
	NV10_PGRAPH_CTX_CACHE(7, 3),
	NV10_PGRAPH_CTX_CACHE(7, 4),
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV10_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM, /* 64 values from 0x400900 to 0x4009fc */
	NV04_PGRAPH_PATTERN, /* 2 values from 0x400808 to 0x40080c */
	NV04_PGRAPH_PATTERN_SHAPE,
	NV03_PGRAPH_MONO_COLOR0,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	NV10_PGRAPH_WINDOWCLIP_HORIZONTAL, /* 8 values from 0x400f00-0x400f1c */
	NV10_PGRAPH_WINDOWCLIP_VERTICAL, /* 8 values from 0x400f20-0x400f3c */
	NV10_PGRAPH_GLOBALSTATE0,
	NV10_PGRAPH_GLOBALSTATE1,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	NV03_PGRAPH_ABS_X_RAM, /* 32 values from 0x400400 to 0x40047c */
	NV03_PGRAPH_ABS_Y_RAM, /* 32 values from 0x400480 to 0x4004fc */
	NV03_PGRAPH_ABS_UCLIP_XMIN,
	NV03_PGRAPH_ABS_UCLIP_XMAX,
	NV03_PGRAPH_ABS_UCLIP_YMIN,
	NV03_PGRAPH_ABS_UCLIP_YMAX,
	NV03_PGRAPH_ABS_UCLIPA_XMIN,
	NV03_PGRAPH_ABS_UCLIPA_XMAX,
	NV03_PGRAPH_ABS_UCLIPA_YMIN,
	NV03_PGRAPH_ABS_UCLIPA_YMAX,
	NV03_PGRAPH_ABS_ICLIP_XMAX,
	NV03_PGRAPH_ABS_ICLIP_YMAX,
	NV03_PGRAPH_XY_LOGIC_MISC0,
	NV03_PGRAPH_XY_LOGIC_MISC1,
	NV03_PGRAPH_XY_LOGIC_MISC2,
	NV03_PGRAPH_XY_LOGIC_MISC3,
	NV10_PGRAPH_COMBINER0_IN_ALPHA,
	NV10_PGRAPH_COMBINER1_IN_ALPHA,
	NV10_PGRAPH_COMBINER0_IN_RGB,
	NV10_PGRAPH_COMBINER1_IN_RGB,
	NV10_PGRAPH_COMBINER_COLOR0,
	NV10_PGRAPH_COMBINER_COLOR1,
	NV10_PGRAPH_COMBINER0_OUT_ALPHA,
	NV10_PGRAPH_COMBINER1_OUT_ALPHA,
	NV10_PGRAPH_COMBINER0_OUT_RGB,
	NV10_PGRAPH_COMBINER1_OUT_RGB,
	NV10_PGRAPH_COMBINER_FINAL0,
	NV10_PGRAPH_COMBINER_FINAL1,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV10_PGRAPH_DIMX_TEXTURE,
	NV10_PGRAPH_WDIMX_TEXTURE,
	NV10_PGRAPH_DVD_COLORFMT,
	NV10_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
};

static int nv17_graph_ctx_regs[] = {
	NV10_PGRAPH_DEBUG_4,
	0x004006b0,
	0x00400eac,
	0x00400eb0,
	0x00400ec0,
	0x00400ed0,
};

struct graph_state {
	int nv10[ARRAY_SIZE(nv10_graph_ctx_regs)];
	int nv17[ARRAY_SIZE(nv17_graph_ctx_regs)];
	struct pipe_state pipe_state;
	uint32_t lma_window[4];
};
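
/* Read or write a block of transform pipe state through the
 * NV10_PGRAPH_PIPE_ADDRESS/NV10_PGRAPH_PIPE_DATA port pair; the data port
 * steps through consecutive pipe words starting at 'addr'.
 */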
#define PIPE_SAVE(dev, state, addr) \
	do { \
		int __i; \
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr); \
		for (__i = 0; __i < ARRAY_SIZE(state); __i++) \
			state[__i] = nv_rd32(dev, NV10_PGRAPH_PIPE_DATA); \
	} while (0)

#define PIPE_RESTORE(dev, state, addr) \
	do { \
		int __i; \
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr); \
		for (__i = 0; __i < ARRAY_SIZE(state); __i++) \
			nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, state[__i]); \
	} while (0)

static void nv10_graph_save_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *pipe = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;

	PIPE_SAVE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(dev, pipe->pipe_0x0200, 0x0200);
	PIPE_SAVE(dev, pipe->pipe_0x6400, 0x6400);
	PIPE_SAVE(dev, pipe->pipe_0x6800, 0x6800);
	PIPE_SAVE(dev, pipe->pipe_0x6c00, 0x6c00);
	PIPE_SAVE(dev, pipe->pipe_0x7000, 0x7000);
	PIPE_SAVE(dev, pipe->pipe_0x7400, 0x7400);
	PIPE_SAVE(dev, pipe->pipe_0x7800, 0x7800);
	PIPE_SAVE(dev, pipe->pipe_0x0040, 0x0040);
	PIPE_SAVE(dev, pipe->pipe_0x0000, 0x0000);
}

static void nv10_graph_load_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *pipe = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;
	uint32_t xfmode0, xfmode1;
	int i;

	nouveau_wait_for_idle(dev);
	/* XXX check haiku comments */
	xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0);
	xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1);
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, 0x10000000);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000008);

	PIPE_RESTORE(dev, pipe->pipe_0x0200, 0x0200);
	nouveau_wait_for_idle(dev);

	/* restore XFMODE */
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, xfmode0);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, xfmode1);
	PIPE_RESTORE(dev, pipe->pipe_0x6400, 0x6400);
	PIPE_RESTORE(dev, pipe->pipe_0x6800, 0x6800);
	PIPE_RESTORE(dev, pipe->pipe_0x6c00, 0x6c00);
	PIPE_RESTORE(dev, pipe->pipe_0x7000, 0x7000);
	PIPE_RESTORE(dev, pipe->pipe_0x7400, 0x7400);
	PIPE_RESTORE(dev, pipe->pipe_0x7800, 0x7800);
	PIPE_RESTORE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_RESTORE(dev, pipe->pipe_0x0000, 0x0000);
	PIPE_RESTORE(dev, pipe->pipe_0x0040, 0x0040);
	nouveau_wait_for_idle(dev);
}
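
/* Fill a new channel's pipe state image with initial values, so that the
 * first context load starts the transform pipe from a consistent state.
 */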
static void nv10_graph_create_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *fifo_pipe_state = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;
	uint32_t *fifo_pipe_state_addr;
	int i;
#define PIPE_INIT(addr) \
	do { \
		fifo_pipe_state_addr = fifo_pipe_state->pipe_##addr; \
	} while (0)
#define PIPE_INIT_END(addr) \
	do { \
		uint32_t *__end_addr = fifo_pipe_state->pipe_##addr + \
				ARRAY_SIZE(fifo_pipe_state->pipe_##addr); \
		if (fifo_pipe_state_addr != __end_addr) \
			NV_ERROR(dev, "incomplete pipe init for 0x%x : %p/%p\n", \
				addr, fifo_pipe_state_addr, __end_addr); \
	} while (0)
#define NV_WRITE_PIPE_INIT(value) *(fifo_pipe_state_addr++) = value

	PIPE_INIT(0x0200);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0200);

	PIPE_INIT(0x6400);
	for (i = 0; i < 211; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	PIPE_INIT_END(0x6400);

	PIPE_INIT(0x6800);
	for (i = 0; i < 162; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	for (i = 0; i < 25; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6800);

	PIPE_INIT(0x6c00);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0xbf800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6c00);

	PIPE_INIT(0x7000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	for (i = 0; i < 35; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7000);

	PIPE_INIT(0x7400);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7400);

	PIPE_INIT(0x7800);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7800);

	PIPE_INIT(0x4400);
	for (i = 0; i < 32; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x4400);

	PIPE_INIT(0x0000);
	for (i = 0; i < 16; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0000);

	PIPE_INIT(0x0040);
	for (i = 0; i < 4; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0040);

#undef PIPE_INIT
#undef PIPE_INIT_END
#undef NV_WRITE_PIPE_INIT
}
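
/* Map a PGRAPH register address to its index in nv10_graph_ctx_regs, or
 * return -1 if the register is not part of the context image.
 */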
static int nv10_graph_ctx_regs_find_offset(struct drm_device *dev, int reg)
{
	int i;
	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++) {
		if (nv10_graph_ctx_regs[i] == reg)
			return i;
	}
	NV_ERROR(dev, "unknown offset nv10_ctx_regs %d\n", reg);
	return -1;
}

static int nv17_graph_ctx_regs_find_offset(struct drm_device *dev, int reg)
{
	int i;
	for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++) {
		if (nv17_graph_ctx_regs[i] == reg)
			return i;
	}
	NV_ERROR(dev, "unknown offset nv17_ctx_regs %d\n", reg);
	return -1;
}

static void nv10_graph_load_dma_vtxbuf(struct nouveau_channel *chan,
					uint32_t inst)
{
	struct drm_device *dev = chan->dev;
	uint32_t st2, st2_dl, st2_dh, fifo_ptr, fifo[0x60/4];
	uint32_t ctx_user, ctx_switch[5];
	int i, subchan = -1;

	/* NV10TCL_DMA_VTXBUF (method 0x18c) modifies hidden state
	 * that cannot be restored via MMIO. Do it through the FIFO
	 * instead.
	 */

	/* Look for a celsius object */
	for (i = 0; i < 8; i++) {
		int class = nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(i, 0)) & 0xfff;

		if (class == 0x56 || class == 0x96 || class == 0x99) {
			subchan = i;
			break;
		}
	}

	if (subchan < 0 || !inst)
		return;

	/* Save the current ctx object */
	ctx_user = nv_rd32(dev, NV10_PGRAPH_CTX_USER);
	for (i = 0; i < 5; i++)
		ctx_switch[i] = nv_rd32(dev, NV10_PGRAPH_CTX_SWITCH(i));

	/* Save the FIFO state */
	st2 = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2);
	st2_dl = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DL);
	st2_dh = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DH);
	fifo_ptr = nv_rd32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR);

	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		fifo[i] = nv_rd32(dev, 0x4007a0 + 4 * i);

	/* Switch to the celsius subchannel */
	for (i = 0; i < 5; i++)
		nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(i),
			nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(subchan, i)));
	nv_mask(dev, NV10_PGRAPH_CTX_USER, 0xe000, subchan << 13);

	/* Inject NV10TCL_DMA_VTXBUF */
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR, 0);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2,
		0x2c000000 | chan->id << 20 | subchan << 16 | 0x18c);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DL, inst);
	nv_mask(dev, NV10_PGRAPH_CTX_CONTROL, 0, 0x10000);
	nv04_graph_fifo_access(dev, true);
	nv04_graph_fifo_access(dev, false);

	/* Restore the FIFO state */
	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		nv_wr32(dev, 0x4007a0 + 4 * i, fifo[i]);

	nv_wr32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR, fifo_ptr);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, st2);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DL, st2_dl);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DH, st2_dh);

	/* Restore the current ctx object */
	for (i = 0; i < 5; i++)
		nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(i), ctx_switch[i]);
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, ctx_user);
}
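
/* Write a channel's saved PGRAPH state back into hardware and mark the
 * channel as the current owner of PGRAPH.
 */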
int
nv10_graph_load_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	uint32_t tmp;
	int i;

	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++)
		nv_wr32(dev, nv10_graph_ctx_regs[i], pgraph_ctx->nv10[i]);
	if (dev_priv->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++)
			nv_wr32(dev, nv17_graph_ctx_regs[i],
				pgraph_ctx->nv17[i]);
	}

	nv10_graph_load_pipe(chan);
	nv10_graph_load_dma_vtxbuf(chan, (nv_rd32(dev, NV10_PGRAPH_GLOBALSTATE1)
					  & 0xffff) << 4);

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10010100);
	tmp = nv_rd32(dev, NV10_PGRAPH_CTX_USER);
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, (tmp & 0xffffff) | chan->id << 24);
	tmp = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, tmp & 0xcfffffff);
	return 0;
}
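
/* Save the active channel's PGRAPH state into its software context image
 * before another channel takes over the engine.
 */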
int
nv10_graph_unload_context(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;
	struct nouveau_channel *chan;
	struct graph_state *ctx;
	uint32_t tmp;
	int i;

	chan = nv10_graph_channel(dev);
	if (!chan)
		return 0;
	ctx = chan->engctx[NVOBJ_ENGINE_GR];

	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++)
		ctx->nv10[i] = nv_rd32(dev, nv10_graph_ctx_regs[i]);

	if (dev_priv->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++)
			ctx->nv17[i] = nv_rd32(dev, nv17_graph_ctx_regs[i]);
	}

	nv10_graph_save_pipe(chan);

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000000);
	tmp = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (pfifo->channels - 1) << 24;
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);
	return 0;
}

static void
nv10_graph_context_switch(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *chan = NULL;
	int chid;

	nouveau_wait_for_idle(dev);

	/* If previous context is valid, we need to save it */
	nv10_graph_unload_context(dev);

	/* Load context for next channel */
	chid = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 20) & 0x1f;
	chan = dev_priv->channels.ptr[chid];
	if (chan && chan->engctx[NVOBJ_ENGINE_GR])
		nv10_graph_load_context(chan);
}

#define NV_WRITE_CTX(reg, val) do { \
	int offset = nv10_graph_ctx_regs_find_offset(dev, reg); \
	if (offset > 0) \
		pgraph_ctx->nv10[offset] = val; \
	} while (0)

#define NV17_WRITE_CTX(reg, val) do { \
	int offset = nv17_graph_ctx_regs_find_offset(dev, reg); \
	if (offset > 0) \
		pgraph_ctx->nv17[offset] = val; \
	} while (0)
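
/* Return the channel that currently owns PGRAPH according to CTX_USER, or
 * NULL if no valid channel is loaded.
 */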
struct nouveau_channel *
nv10_graph_channel(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int chid = dev_priv->engine.fifo.channels;

	if (nv_rd32(dev, NV10_PGRAPH_CTX_CONTROL) & 0x00010000)
		chid = nv_rd32(dev, NV10_PGRAPH_CTX_USER) >> 24;

	if (chid >= dev_priv->engine.fifo.channels)
		return NULL;

	return dev_priv->channels.ptr[chid];
}

static int
nv10_graph_context_new(struct nouveau_channel *chan, int engine)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx;

	NV_DEBUG(dev, "nv10_graph_context_create %d\n", chan->id);

	pgraph_ctx = kzalloc(sizeof(*pgraph_ctx), GFP_KERNEL);
	if (pgraph_ctx == NULL)
		return -ENOMEM;
	chan->engctx[engine] = pgraph_ctx;

	NV_WRITE_CTX(0x00400e88, 0x08000000);
	NV_WRITE_CTX(0x00400e9c, 0x4b7fffff);
	NV_WRITE_CTX(NV03_PGRAPH_XY_LOGIC_MISC0, 0x0001ffff);
	NV_WRITE_CTX(0x00400e10, 0x00001000);
	NV_WRITE_CTX(0x00400e14, 0x00001000);
	NV_WRITE_CTX(0x00400e30, 0x00080008);
	NV_WRITE_CTX(0x00400e34, 0x00080008);
	if (dev_priv->chipset >= 0x17) {
		/* is it really needed ??? */
		NV17_WRITE_CTX(NV10_PGRAPH_DEBUG_4,
			       nv_rd32(dev, NV10_PGRAPH_DEBUG_4));
		NV17_WRITE_CTX(0x004006b0, nv_rd32(dev, 0x004006b0));
		NV17_WRITE_CTX(0x00400eac, 0x0fff0000);
		NV17_WRITE_CTX(0x00400eb0, 0x0fff0000);
		NV17_WRITE_CTX(0x00400ec0, 0x00000080);
		NV17_WRITE_CTX(0x00400ed0, 0x00000080);
	}
	NV_WRITE_CTX(NV10_PGRAPH_CTX_USER, chan->id << 24);

	nv10_graph_create_pipe(chan);
	return 0;
}

static void
nv10_graph_context_del(struct nouveau_channel *chan, int engine)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->engctx[engine];
	unsigned long flags;

	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	nv04_graph_fifo_access(dev, false);

	/* Unload the context if it's the currently active one */
	if (nv10_graph_channel(dev) == chan)
		nv10_graph_unload_context(dev);

	nv04_graph_fifo_access(dev, true);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);

	/* Free the context resources */
	chan->engctx[engine] = NULL;
	kfree(pgraph_ctx);
}

static void
nv10_graph_set_tile_region(struct drm_device *dev, int i)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_tile_reg *tile = &dev_priv->tile.reg[i];

	nv_wr32(dev, NV10_PGRAPH_TLIMIT(i), tile->limit);
	nv_wr32(dev, NV10_PGRAPH_TSIZE(i), tile->pitch);
	nv_wr32(dev, NV10_PGRAPH_TILE(i), tile->addr);
}
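
/* Bring PGRAPH out of reset, clear pending interrupts, program the DEBUG
 * registers and tiling regions, and initialise the context control state.
 */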
static int
nv10_graph_init(struct drm_device *dev, int engine)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	u32 tmp;
	int i;

	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
			~NV_PMC_ENABLE_PGRAPH);
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
			 NV_PMC_ENABLE_PGRAPH);

	nv_wr32(dev, NV03_PGRAPH_INTR   , 0xFFFFFFFF);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0xFFFFFFFF);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x00000000);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x00118700);
	/* nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x24E00810); */ /* 0x25f92ad9 */
	nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x25f92ad9);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0x55DE0830 |
				(1 << 29) |
				(1 << 31));
	if (dev_priv->chipset >= 0x17) {
		nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x1f000000);
		nv_wr32(dev, 0x400a10, 0x3ff3fb6);
		nv_wr32(dev, 0x400838, 0x2f8684);
		nv_wr32(dev, 0x40083c, 0x115f3f);
		nv_wr32(dev, 0x004006b0, 0x40000020);
	} else
		nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x00000000);

	/* Turn all the tiling regions off. */
	for (i = 0; i < NV10_PFB_TILE__SIZE; i++)
		nv10_graph_set_tile_region(dev, i);

	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(0), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(1), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(2), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(3), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(4), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_STATE, 0xFFFFFFFF);

	tmp = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);
	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000100);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, 0x08000000);

	return 0;
}

static int
nv10_graph_fini(struct drm_device *dev, int engine)
{
	nv10_graph_unload_context(dev);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
	return 0;
}
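
/* Software methods for the NV17 celsius class (0x99): the four LMA window
 * methods (0x1638-0x1644) are buffered in the context and, when the last one
 * arrives, written into the transform pipe.
 */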
static int
nv17_graph_mthd_lma_window(struct nouveau_channel *chan,
			   u32 class, u32 mthd, u32 data)
{
	struct graph_state *ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct drm_device *dev = chan->dev;
	struct pipe_state *pipe = &ctx->pipe_state;
	uint32_t pipe_0x0040[1], pipe_0x64c0[8], pipe_0x6a80[3], pipe_0x6ab0[3];
	uint32_t xfmode0, xfmode1;
	int i;

	ctx->lma_window[(mthd - 0x1638) / 4] = data;

	if (mthd != 0x1644)
		return 0;

	nouveau_wait_for_idle(dev);

	PIPE_SAVE(dev, pipe_0x0040, 0x0040);
	PIPE_SAVE(dev, pipe->pipe_0x0200, 0x0200);

	PIPE_RESTORE(dev, ctx->lma_window, 0x6790);

	nouveau_wait_for_idle(dev);

	xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0);
	xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1);

	PIPE_SAVE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(dev, pipe_0x64c0, 0x64c0);
	PIPE_SAVE(dev, pipe_0x6ab0, 0x6ab0);
	PIPE_SAVE(dev, pipe_0x6a80, 0x6a80);

	nouveau_wait_for_idle(dev);

	nv_wr32(dev, NV10_PGRAPH_XFMODE0, 0x10000000);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000008);

	PIPE_RESTORE(dev, pipe->pipe_0x0200, 0x0200);

	nouveau_wait_for_idle(dev);

	PIPE_RESTORE(dev, pipe_0x0040, 0x0040);

	nv_wr32(dev, NV10_PGRAPH_XFMODE0, xfmode0);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, xfmode1);

	PIPE_RESTORE(dev, pipe_0x64c0, 0x64c0);
	PIPE_RESTORE(dev, pipe_0x6ab0, 0x6ab0);
	PIPE_RESTORE(dev, pipe_0x6a80, 0x6a80);
	PIPE_RESTORE(dev, pipe->pipe_0x4400, 0x4400);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000000c0);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nouveau_wait_for_idle(dev);

	return 0;
}

static int
nv17_graph_mthd_lma_enable(struct nouveau_channel *chan,
			   u32 class, u32 mthd, u32 data)
{
	struct drm_device *dev = chan->dev;

	nouveau_wait_for_idle(dev);

	nv_wr32(dev, NV10_PGRAPH_DEBUG_4,
		nv_rd32(dev, NV10_PGRAPH_DEBUG_4) | 0x1 << 8);
	nv_wr32(dev, 0x004006b0,
		nv_rd32(dev, 0x004006b0) | 0x8 << 24);

	return 0;
}

struct nouveau_bitfield nv10_graph_intr[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{ NV_PGRAPH_INTR_ERROR,  "ERROR"  },
	{}
};

struct nouveau_bitfield nv10_graph_nstatus[] = {
	{ NV10_PGRAPH_NSTATUS_STATE_IN_USE, "STATE_IN_USE" },
	{ NV10_PGRAPH_NSTATUS_INVALID_STATE, "INVALID_STATE" },
	{ NV10_PGRAPH_NSTATUS_BAD_ARGUMENT, "BAD_ARGUMENT" },
	{ NV10_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
	{}
};
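
/* PGRAPH interrupt handler: decode the trapped channel/subchannel/method,
 * hand ILLEGAL_MTHD errors to the software method handlers and service
 * hardware-requested context switches.
 */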
static void
nv10_graph_isr(struct drm_device *dev)
{
	u32 stat;

	while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
		u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
		u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
		u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
		u32 chid = (addr & 0x01f00000) >> 20;
		u32 subc = (addr & 0x00070000) >> 16;
		u32 mthd = (addr & 0x00001ffc);
		u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
		u32 class = nv_rd32(dev, 0x400160 + subc * 4) & 0xfff;
		u32 show = stat;

		if (stat & NV_PGRAPH_INTR_ERROR) {
			if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
				if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
					show &= ~NV_PGRAPH_INTR_ERROR;
			}
		}

		if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
			nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
			stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			nv10_graph_context_switch(dev);
		}

		nv_wr32(dev, NV03_PGRAPH_INTR, stat);
		nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

		if (show && nouveau_ratelimit()) {
			NV_INFO(dev, "PGRAPH -");
			nouveau_bitfield_print(nv10_graph_intr, show);
			printk(" nsource:");
			nouveau_bitfield_print(nv04_graph_nsource, nsource);
			printk(" nstatus:");
			nouveau_bitfield_print(nv10_graph_nstatus, nstatus);
			printk("\n");
			NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
				     "mthd 0x%04x data 0x%08x\n",
				chid, subc, class, mthd, data);
		}
	}
}

static void
nv10_graph_destroy(struct drm_device *dev, int engine)
{
	struct nv10_graph_engine *pgraph = nv_engine(dev, engine);

	nouveau_irq_unregister(dev, 12);
	kfree(pgraph);
}
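
/* Register the NV10 PGRAPH engine: hook up the exec-engine methods, the
 * PGRAPH interrupt handler (IRQ bit 12) and the graphics object classes.
 */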
int
nv10_graph_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nv10_graph_engine *pgraph;

	pgraph = kzalloc(sizeof(*pgraph), GFP_KERNEL);
	if (!pgraph)
		return -ENOMEM;

	pgraph->base.destroy = nv10_graph_destroy;
	pgraph->base.init = nv10_graph_init;
	pgraph->base.fini = nv10_graph_fini;
	pgraph->base.context_new = nv10_graph_context_new;
	pgraph->base.context_del = nv10_graph_context_del;
	pgraph->base.object_new = nv04_graph_object_new;
	pgraph->base.set_tile_region = nv10_graph_set_tile_region;

	NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base);
	nouveau_irq_register(dev, 12, nv10_graph_isr);

	/* nvsw */
	NVOBJ_CLASS(dev, 0x506e, SW);
	NVOBJ_MTHD (dev, 0x506e, 0x0500, nv04_graph_mthd_page_flip);

	NVOBJ_CLASS(dev, 0x0030, GR); /* null */
	NVOBJ_CLASS(dev, 0x0039, GR); /* m2mf */
	NVOBJ_CLASS(dev, 0x004a, GR); /* gdirect */
	NVOBJ_CLASS(dev, 0x005f, GR); /* imageblit */
	NVOBJ_CLASS(dev, 0x009f, GR); /* imageblit (nv12) */
	NVOBJ_CLASS(dev, 0x008a, GR); /* ifc */
	NVOBJ_CLASS(dev, 0x0089, GR); /* sifm */
	NVOBJ_CLASS(dev, 0x0062, GR); /* surf2d */
	NVOBJ_CLASS(dev, 0x0043, GR); /* rop */
	NVOBJ_CLASS(dev, 0x0012, GR); /* beta1 */
	NVOBJ_CLASS(dev, 0x0072, GR); /* beta4 */
	NVOBJ_CLASS(dev, 0x0019, GR); /* cliprect */
	NVOBJ_CLASS(dev, 0x0044, GR); /* pattern */
	NVOBJ_CLASS(dev, 0x0052, GR); /* swzsurf */
	NVOBJ_CLASS(dev, 0x0093, GR); /* surf3d */
	NVOBJ_CLASS(dev, 0x0094, GR); /* tex_tri */
	NVOBJ_CLASS(dev, 0x0095, GR); /* multitex_tri */

	/* celsius */
	if (dev_priv->chipset <= 0x10) {
		NVOBJ_CLASS(dev, 0x0056, GR);
	} else
	if (dev_priv->chipset < 0x17 || dev_priv->chipset == 0x1a) {
		NVOBJ_CLASS(dev, 0x0096, GR);
	} else {
		NVOBJ_CLASS(dev, 0x0099, GR);
		NVOBJ_MTHD (dev, 0x0099, 0x1638, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x163c, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1640, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1644, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1658, nv17_graph_mthd_lma_enable);
	}

	return 0;
}