drm/nv04/gr: move to exec engine interfaces

Like nv10-nv50, needs cleanup.

Signed-off-by: Ben Skeggs <bskeggs@redhat.com>

parent d11db27901
commit 4976986bd4
3 changed files with 195 additions and 184 deletions
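
To make the shape of the change easier to follow: the conversion drops the old engine->graph.* function pointers and instead has nv04_graph_create() register a PGRAPH engine object whose hooks (init, fini, context_new, context_del, object_new, destroy) the core calls through a common vtable, with per-channel state kept in chan->engctx[]. The sketch below is a minimal, self-contained illustration of that pattern, not the driver code: the sketch_* names and the toy drm_device/nouveau_channel layouts are invented for the example, while the real struct nouveau_exec_engine, NVOBJ_ENGINE_GR and NVOBJ_ENGINE_ADD() live in nouveau_drv.h.

/*
 * Illustrative sketch only -- not the driver's actual definitions.  A
 * per-chipset engine object embeds a common hook table and registers
 * itself with the device; unused hooks simply stay NULL here.
 */
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

struct drm_device;
struct nouveau_channel;

struct nouveau_exec_engine {        /* the per-engine hook table */
    void (*destroy)(struct drm_device *, int engine);
    int  (*init)(struct drm_device *, int engine);
    int  (*fini)(struct drm_device *, int engine);
    int  (*context_new)(struct nouveau_channel *, int engine);
    void (*context_del)(struct nouveau_channel *, int engine);
    int  (*object_new)(struct nouveau_channel *, int engine,
                       unsigned int handle, unsigned short oclass);
};

#define SKETCH_ENGINE_GR 0          /* stand-in for NVOBJ_ENGINE_GR */

struct drm_device {                 /* toy device: a table of engines */
    struct nouveau_exec_engine *eng[1];
};

struct nouveau_channel {            /* toy channel with per-engine ctx */
    struct drm_device *dev;
    void *engctx[1];
};

struct sketch_graph_engine {        /* cf. struct nv04_graph_engine */
    struct nouveau_exec_engine base;
};

static int sketch_graph_init(struct drm_device *dev, int engine)
{
    (void)dev; (void)engine;
    printf("PGRAPH brought up through its exec-engine init hook\n");
    return 0;
}

static int sketch_graph_context_new(struct nouveau_channel *chan, int engine)
{
    /* like nv04_graph_context_new(): allocate ctx, stash it in engctx[] */
    chan->engctx[engine] = calloc(1, 64);
    return chan->engctx[engine] ? 0 : -ENOMEM;
}

static void sketch_graph_context_del(struct nouveau_channel *chan, int engine)
{
    free(chan->engctx[engine]);
    chan->engctx[engine] = NULL;
}

static int sketch_graph_create(struct drm_device *dev)
{
    /* registration follows the shape of nv04_graph_create() */
    struct sketch_graph_engine *pgraph = calloc(1, sizeof(*pgraph));

    if (!pgraph)
        return -ENOMEM;

    pgraph->base.init        = sketch_graph_init;
    pgraph->base.context_new = sketch_graph_context_new;
    pgraph->base.context_del = sketch_graph_context_del;

    dev->eng[SKETCH_ENGINE_GR] = &pgraph->base;  /* ~NVOBJ_ENGINE_ADD() */
    return 0;
}

int main(void)
{
    struct drm_device dev = { { NULL } };
    struct nouveau_channel chan = { &dev, { NULL } };

    if (sketch_graph_create(&dev))
        return 1;
    dev.eng[SKETCH_ENGINE_GR]->init(&dev, SKETCH_ENGINE_GR);
    dev.eng[SKETCH_ENGINE_GR]->context_new(&chan, SKETCH_ENGINE_GR);
    dev.eng[SKETCH_ENGINE_GR]->context_del(&chan, SKETCH_ENGINE_GR);
    return 0;
}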
@@ -1141,15 +1141,9 @@ extern int nvc0_fifo_load_context(struct nouveau_channel *);
extern int nvc0_fifo_unload_context(struct drm_device *);

/* nv04_graph.c */
extern int nv04_graph_init(struct drm_device *);
extern void nv04_graph_takedown(struct drm_device *);
extern int nv04_graph_create(struct drm_device *);
extern void nv04_graph_fifo_access(struct drm_device *, bool);
extern struct nouveau_channel *nv04_graph_channel(struct drm_device *);
extern int nv04_graph_create_context(struct nouveau_channel *);
extern void nv04_graph_destroy_context(struct nouveau_channel *);
extern int nv04_graph_load_context(struct nouveau_channel *);
extern int nv04_graph_unload_context(struct drm_device *);
extern int nv04_graph_object_new(struct nouveau_channel *, u32, u16);
extern int nv04_graph_object_new(struct nouveau_channel *, int, u32, u16);
extern int nv04_graph_mthd_page_flip(struct nouveau_channel *chan,
                                     u32 class, u32 mthd, u32 data);
extern struct nouveau_bitfield nv04_graph_nsource[];
@@ -65,15 +65,10 @@ static int nouveau_init_engine_ptrs(struct drm_device *dev)
    engine->timer.takedown = nv04_timer_takedown;
    engine->fb.init = nv04_fb_init;
    engine->fb.takedown = nv04_fb_takedown;
    engine->graph.init = nv04_graph_init;
    engine->graph.takedown = nv04_graph_takedown;
    engine->graph.fifo_access = nv04_graph_fifo_access;
    engine->graph.channel = nv04_graph_channel;
    engine->graph.create_context = nv04_graph_create_context;
    engine->graph.destroy_context = nv04_graph_destroy_context;
    engine->graph.load_context = nv04_graph_load_context;
    engine->graph.unload_context = nv04_graph_unload_context;
    engine->graph.object_new = nv04_graph_object_new;
    engine->graph.init = nouveau_stub_init;
    engine->graph.takedown = nouveau_stub_takedown;
    engine->graph.channel = nvc0_graph_channel;
    engine->graph.fifo_access = nvc0_graph_fifo_access;
    engine->fifo.channels = 16;
    engine->fifo.init = nv04_fifo_init;
    engine->fifo.takedown = nv04_fifo_fini;
@@ -599,6 +594,9 @@ nouveau_card_init(struct drm_device *dev)
        goto out_timer;

    switch (dev_priv->card_type) {
    case NV_04:
        nv04_graph_create(dev);
        break;
    case NV_10:
        nv10_graph_create(dev);
        break;
@@ -30,8 +30,9 @@
#include "nouveau_util.h"
#include "nouveau_ramht.h"

static int nv04_graph_register(struct drm_device *dev);
static void nv04_graph_isr(struct drm_device *dev);
struct nv04_graph_engine {
    struct nouveau_exec_engine base;
};

static uint32_t nv04_graph_ctx_regs[] = {
    0x0040053c,
@@ -351,7 +352,7 @@ struct graph_state {
    uint32_t nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
};

struct nouveau_channel *
static struct nouveau_channel *
nv04_graph_channel(struct drm_device *dev)
{
    struct drm_nouveau_private *dev_priv = dev->dev_private;
@@ -366,26 +367,6 @@ nv04_graph_channel(struct drm_device *dev)
    return dev_priv->channels.ptr[chid];
}

static void
nv04_graph_context_switch(struct drm_device *dev)
{
    struct drm_nouveau_private *dev_priv = dev->dev_private;
    struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
    struct nouveau_channel *chan = NULL;
    int chid;

    nouveau_wait_for_idle(dev);

    /* If previous context is valid, we need to save it */
    pgraph->unload_context(dev);

    /* Load context for next channel */
    chid = dev_priv->engine.fifo.channel_id(dev);
    chan = dev_priv->channels.ptr[chid];
    if (chan)
        nv04_graph_load_context(chan);
}

static uint32_t *ctx_reg(struct graph_state *ctx, uint32_t reg)
{
    int i;
@@ -398,48 +379,11 @@ static uint32_t *ctx_reg(struct graph_state *ctx, uint32_t reg)
    return NULL;
}

int nv04_graph_create_context(struct nouveau_channel *chan)
{
    struct graph_state *pgraph_ctx;
    NV_DEBUG(chan->dev, "nv04_graph_context_create %d\n", chan->id);

    chan->pgraph_ctx = pgraph_ctx = kzalloc(sizeof(*pgraph_ctx),
                                            GFP_KERNEL);
    if (pgraph_ctx == NULL)
        return -ENOMEM;

    *ctx_reg(pgraph_ctx, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;

    return 0;
}

void nv04_graph_destroy_context(struct nouveau_channel *chan)
static int
nv04_graph_load_context(struct nouveau_channel *chan)
{
    struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
    struct drm_device *dev = chan->dev;
    struct drm_nouveau_private *dev_priv = dev->dev_private;
    struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
    struct graph_state *pgraph_ctx = chan->pgraph_ctx;
    unsigned long flags;

    spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
    pgraph->fifo_access(dev, false);

    /* Unload the context if it's the currently active one */
    if (pgraph->channel(dev) == chan)
        pgraph->unload_context(dev);

    /* Free the context resources */
    kfree(pgraph_ctx);
    chan->pgraph_ctx = NULL;

    pgraph->fifo_access(dev, true);
    spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);
}

int nv04_graph_load_context(struct nouveau_channel *chan)
{
    struct drm_device *dev = chan->dev;
    struct graph_state *pgraph_ctx = chan->pgraph_ctx;
    uint32_t tmp;
    int i;

@@ -457,20 +401,19 @@ int nv04_graph_load_context(struct nouveau_channel *chan)
    return 0;
}

int
static int
nv04_graph_unload_context(struct drm_device *dev)
{
    struct drm_nouveau_private *dev_priv = dev->dev_private;
    struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
    struct nouveau_channel *chan = NULL;
    struct graph_state *ctx;
    uint32_t tmp;
    int i;

    chan = pgraph->channel(dev);
    chan = nv04_graph_channel(dev);
    if (!chan)
        return 0;
    ctx = chan->pgraph_ctx;
    ctx = chan->engctx[NVOBJ_ENGINE_GR];

    for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
        ctx->nv04[i] = nv_rd32(dev, nv04_graph_ctx_regs[i]);
@@ -482,8 +425,48 @@ nv04_graph_unload_context(struct drm_device *dev)
    return 0;
}

static int
nv04_graph_context_new(struct nouveau_channel *chan, int engine)
{
    struct graph_state *pgraph_ctx;
    NV_DEBUG(chan->dev, "nv04_graph_context_create %d\n", chan->id);

    pgraph_ctx = kzalloc(sizeof(*pgraph_ctx), GFP_KERNEL);
    if (pgraph_ctx == NULL)
        return -ENOMEM;

    *ctx_reg(pgraph_ctx, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;

    chan->engctx[engine] = pgraph_ctx;
    return 0;
}

static void
nv04_graph_context_del(struct nouveau_channel *chan, int engine)
{
    struct drm_device *dev = chan->dev;
    struct drm_nouveau_private *dev_priv = dev->dev_private;
    struct graph_state *pgraph_ctx = chan->engctx[engine];
    unsigned long flags;

    spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
    nv04_graph_fifo_access(dev, false);

    /* Unload the context if it's the currently active one */
    if (nv04_graph_channel(dev) == chan)
        nv04_graph_unload_context(dev);

    nv04_graph_fifo_access(dev, true);
    spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);

    /* Free the context resources */
    kfree(pgraph_ctx);
    chan->engctx[engine] = NULL;
}

int
nv04_graph_object_new(struct nouveau_channel *chan, u32 handle, u16 class)
nv04_graph_object_new(struct nouveau_channel *chan, int engine,
                      u32 handle, u16 class)
{
    struct drm_device *dev = chan->dev;
    struct nouveau_gpuobj *obj = NULL;
@@ -509,23 +492,18 @@ nv04_graph_object_new(struct nouveau_channel *chan, u32 handle, u16 class)
    return ret;
}

int nv04_graph_init(struct drm_device *dev)
static int
nv04_graph_init(struct drm_device *dev, int engine)
{
    struct drm_nouveau_private *dev_priv = dev->dev_private;
    uint32_t tmp;
    int ret;

    nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
            ~NV_PMC_ENABLE_PGRAPH);
    nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
            NV_PMC_ENABLE_PGRAPH);

    ret = nv04_graph_register(dev);
    if (ret)
        return ret;

    /* Enable PGRAPH interrupts */
    nouveau_irq_register(dev, 12, nv04_graph_isr);
    nv_wr32(dev, NV03_PGRAPH_INTR, 0xFFFFFFFF);
    nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

@@ -559,10 +537,12 @@ int nv04_graph_init(struct drm_device *dev)
    return 0;
}

void nv04_graph_takedown(struct drm_device *dev)
static int
nv04_graph_fini(struct drm_device *dev, int engine)
{
    nv04_graph_unload_context(dev);
    nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
    nouveau_irq_unregister(dev, 12);
    return 0;
}

void
@@ -997,13 +977,138 @@ nv04_graph_mthd_bind_chroma(struct nouveau_channel *chan,
    return 1;
}

static int
nv04_graph_register(struct drm_device *dev)
static struct nouveau_bitfield nv04_graph_intr[] = {
    { NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
    {}
};

static struct nouveau_bitfield nv04_graph_nstatus[] = {
    { NV04_PGRAPH_NSTATUS_STATE_IN_USE, "STATE_IN_USE" },
    { NV04_PGRAPH_NSTATUS_INVALID_STATE, "INVALID_STATE" },
    { NV04_PGRAPH_NSTATUS_BAD_ARGUMENT, "BAD_ARGUMENT" },
    { NV04_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
    {}
};

struct nouveau_bitfield nv04_graph_nsource[] = {
    { NV03_PGRAPH_NSOURCE_NOTIFICATION, "NOTIFICATION" },
    { NV03_PGRAPH_NSOURCE_DATA_ERROR, "DATA_ERROR" },
    { NV03_PGRAPH_NSOURCE_PROTECTION_ERROR, "PROTECTION_ERROR" },
    { NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION, "RANGE_EXCEPTION" },
    { NV03_PGRAPH_NSOURCE_LIMIT_COLOR, "LIMIT_COLOR" },
    { NV03_PGRAPH_NSOURCE_LIMIT_ZETA, "LIMIT_ZETA" },
    { NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD, "ILLEGAL_MTHD" },
    { NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION, "DMA_R_PROTECTION" },
    { NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION, "DMA_W_PROTECTION" },
    { NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION, "FORMAT_EXCEPTION" },
    { NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION, "PATCH_EXCEPTION" },
    { NV03_PGRAPH_NSOURCE_STATE_INVALID, "STATE_INVALID" },
    { NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY, "DOUBLE_NOTIFY" },
    { NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE, "NOTIFY_IN_USE" },
    { NV03_PGRAPH_NSOURCE_METHOD_CNT, "METHOD_CNT" },
    { NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION, "BFR_NOTIFICATION" },
    { NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
    { NV03_PGRAPH_NSOURCE_DMA_WIDTH_A, "DMA_WIDTH_A" },
    { NV03_PGRAPH_NSOURCE_DMA_WIDTH_B, "DMA_WIDTH_B" },
    {}
};

static void
nv04_graph_context_switch(struct drm_device *dev)
{
    struct drm_nouveau_private *dev_priv = dev->dev_private;
    struct nouveau_channel *chan = NULL;
    int chid;

    if (dev_priv->engine.graph.registered)
        return 0;
    nouveau_wait_for_idle(dev);

    /* If previous context is valid, we need to save it */
    nv04_graph_unload_context(dev);

    /* Load context for next channel */
    chid = dev_priv->engine.fifo.channel_id(dev);
    chan = dev_priv->channels.ptr[chid];
    if (chan)
        nv04_graph_load_context(chan);
}

static void
nv04_graph_isr(struct drm_device *dev)
{
    u32 stat;

    while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
        u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
        u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
        u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
        u32 chid = (addr & 0x0f000000) >> 24;
        u32 subc = (addr & 0x0000e000) >> 13;
        u32 mthd = (addr & 0x00001ffc);
        u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
        u32 class = nv_rd32(dev, 0x400180 + subc * 4) & 0xff;
        u32 show = stat;

        if (stat & NV_PGRAPH_INTR_NOTIFY) {
            if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
                if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
                    show &= ~NV_PGRAPH_INTR_NOTIFY;
            }
        }

        if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
            nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
            stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
            show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
            nv04_graph_context_switch(dev);
        }

        nv_wr32(dev, NV03_PGRAPH_INTR, stat);
        nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

        if (show && nouveau_ratelimit()) {
            NV_INFO(dev, "PGRAPH -");
            nouveau_bitfield_print(nv04_graph_intr, show);
            printk(" nsource:");
            nouveau_bitfield_print(nv04_graph_nsource, nsource);
            printk(" nstatus:");
            nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
            printk("\n");
            NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
                         "mthd 0x%04x data 0x%08x\n",
                    chid, subc, class, mthd, data);
        }
    }
}

static void
nv04_graph_destroy(struct drm_device *dev, int engine)
{
    struct nv04_graph_engine *pgraph = nv_engine(dev, engine);

    nouveau_irq_unregister(dev, 12);

    NVOBJ_ENGINE_DEL(dev, GR);
    kfree(pgraph);
}

int
nv04_graph_create(struct drm_device *dev)
{
    struct nv04_graph_engine *pgraph;

    pgraph = kzalloc(sizeof(*pgraph), GFP_KERNEL);
    if (!pgraph)
        return -ENOMEM;

    pgraph->base.destroy = nv04_graph_destroy;
    pgraph->base.init = nv04_graph_init;
    pgraph->base.fini = nv04_graph_fini;
    pgraph->base.context_new = nv04_graph_context_new;
    pgraph->base.context_del = nv04_graph_context_del;
    pgraph->base.object_new = nv04_graph_object_new;

    NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base);
    nouveau_irq_register(dev, 12, nv04_graph_isr);

    /* dvd subpicture */
    NVOBJ_CLASS(dev, 0x0038, GR);
@@ -1250,91 +1355,5 @@ nv04_graph_register(struct drm_device *dev)
    NVOBJ_CLASS(dev, 0x506e, SW);
    NVOBJ_MTHD (dev, 0x506e, 0x0150, nv04_graph_mthd_set_ref);
    NVOBJ_MTHD (dev, 0x506e, 0x0500, nv04_graph_mthd_page_flip);

    dev_priv->engine.graph.registered = true;
    return 0;
};

static struct nouveau_bitfield nv04_graph_intr[] = {
    { NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
    {}
};

static struct nouveau_bitfield nv04_graph_nstatus[] = {
    { NV04_PGRAPH_NSTATUS_STATE_IN_USE, "STATE_IN_USE" },
    { NV04_PGRAPH_NSTATUS_INVALID_STATE, "INVALID_STATE" },
    { NV04_PGRAPH_NSTATUS_BAD_ARGUMENT, "BAD_ARGUMENT" },
    { NV04_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
    {}
};

struct nouveau_bitfield nv04_graph_nsource[] = {
    { NV03_PGRAPH_NSOURCE_NOTIFICATION, "NOTIFICATION" },
    { NV03_PGRAPH_NSOURCE_DATA_ERROR, "DATA_ERROR" },
    { NV03_PGRAPH_NSOURCE_PROTECTION_ERROR, "PROTECTION_ERROR" },
    { NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION, "RANGE_EXCEPTION" },
    { NV03_PGRAPH_NSOURCE_LIMIT_COLOR, "LIMIT_COLOR" },
    { NV03_PGRAPH_NSOURCE_LIMIT_ZETA, "LIMIT_ZETA" },
    { NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD, "ILLEGAL_MTHD" },
    { NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION, "DMA_R_PROTECTION" },
    { NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION, "DMA_W_PROTECTION" },
    { NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION, "FORMAT_EXCEPTION" },
    { NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION, "PATCH_EXCEPTION" },
    { NV03_PGRAPH_NSOURCE_STATE_INVALID, "STATE_INVALID" },
    { NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY, "DOUBLE_NOTIFY" },
    { NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE, "NOTIFY_IN_USE" },
    { NV03_PGRAPH_NSOURCE_METHOD_CNT, "METHOD_CNT" },
    { NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION, "BFR_NOTIFICATION" },
    { NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
    { NV03_PGRAPH_NSOURCE_DMA_WIDTH_A, "DMA_WIDTH_A" },
    { NV03_PGRAPH_NSOURCE_DMA_WIDTH_B, "DMA_WIDTH_B" },
    {}
};

static void
nv04_graph_isr(struct drm_device *dev)
{
    u32 stat;

    while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
        u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
        u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
        u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
        u32 chid = (addr & 0x0f000000) >> 24;
        u32 subc = (addr & 0x0000e000) >> 13;
        u32 mthd = (addr & 0x00001ffc);
        u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
        u32 class = nv_rd32(dev, 0x400180 + subc * 4) & 0xff;
        u32 show = stat;

        if (stat & NV_PGRAPH_INTR_NOTIFY) {
            if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
                if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
                    show &= ~NV_PGRAPH_INTR_NOTIFY;
            }
        }

        if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
            nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
            stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
            show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
            nv04_graph_context_switch(dev);
        }

        nv_wr32(dev, NV03_PGRAPH_INTR, stat);
        nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

        if (show && nouveau_ratelimit()) {
            NV_INFO(dev, "PGRAPH -");
            nouveau_bitfield_print(nv04_graph_intr, show);
            printk(" nsource:");
            nouveau_bitfield_print(nv04_graph_nsource, nsource);
            printk(" nstatus:");
            nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
            printk("\n");
            NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
                         "mthd 0x%04x data 0x%08x\n",
                    chid, subc, class, mthd, data);
        }
    }
}