Mirror of https://github.com/tiagovignatti/intel-gpu-tools.git (synced 2025-07-23 18:06:18 +00:00)
context: libdrm wrappers

This wraps libdrm functionality to exec with contexts. This patch should not
be applied until the context support in libdrm has been updated.

Signed-off-by: Ben Widawsky <ben@bwidawsk.net>
commit a635a5ac3b (parent c157e9485e)
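For context, a minimal sketch of how a test might drive the new wrapper once the libdrm context API has landed. The setup boilerplate (drm_open_any(), bufmgr and batch allocation, the MI_NOOP payload) and the exact header list are illustrative assumptions, not part of this patch; only intel_batchbuffer_flush_with_context() and the drm_intel_gem_context_* calls are the interfaces being wrapped here.

#include <unistd.h>

#include "intel_bufmgr.h"        /* libdrm: drm_intel_gem_context_*, bufmgr */
#include "drmtest.h"             /* igt: drm_open_any() */
#include "intel_batchbuffer.h"
#include "intel_reg.h"           /* MI_NOOP */

int main(void)
{
        int fd = drm_open_any();
        drm_intel_bufmgr *bufmgr = drm_intel_bufmgr_gem_init(fd, 4096);
        struct intel_batchbuffer *batch =
                intel_batchbuffer_alloc(bufmgr, intel_get_drm_devid(fd));

        /* Requires kernel and libdrm context support. */
        drm_intel_context *ctx = drm_intel_gem_context_create(bufmgr);

        /* Emit a trivial batch and submit it on the render ring with the
         * context attached, using the wrapper added by this patch. */
        intel_batchbuffer_emit_dword(batch, MI_NOOP);
        intel_batchbuffer_flush_with_context(batch, ctx);

        drm_intel_gem_context_destroy(ctx);
        intel_batchbuffer_free(batch);
        drm_intel_bufmgr_destroy(bufmgr);
        close(fd);

        return 0;
}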
lib/intel_batchbuffer.c:

@@ -75,13 +75,13 @@ intel_batchbuffer_free(struct intel_batchbuffer *batch)
 
 #define CMD_POLY_STIPPLE_OFFSET 0x7906
 
-void
-intel_batchbuffer_flush_on_ring(struct intel_batchbuffer *batch, int ring)
+static unsigned int
+flush_on_ring_common(struct intel_batchbuffer *batch, int ring)
 {
         unsigned int used = batch->ptr - batch->buffer;
 
         if (used == 0)
-                return;
+                return 0;
 
         if (IS_GEN5(batch->devid)) {
                 /* emit gen5 w/a without batch space checks - we reserve that
@@ -100,7 +100,17 @@ intel_batchbuffer_flush_on_ring(struct intel_batchbuffer *batch, int ring)
         /* Mark the end of the buffer. */
         *(uint32_t *)(batch->ptr) = MI_BATCH_BUFFER_END; /* noop */
         batch->ptr += 4;
-        used = batch->ptr - batch->buffer;
+        return batch->ptr - batch->buffer;
+}
+
+void
+intel_batchbuffer_flush_on_ring(struct intel_batchbuffer *batch, int ring)
+{
+        int ret;
+        unsigned int used = flush_on_ring_common(batch, ring);
+
+        if (used == 0)
+                return;
 
         do_or_die(drm_intel_bo_subdata(batch->bo, 0, used, batch->buffer));
 
@@ -111,6 +121,28 @@ intel_batchbuffer_flush_on_ring(struct intel_batchbuffer *batch, int ring)
         intel_batchbuffer_reset(batch);
 }
 
+void
+intel_batchbuffer_flush_with_context(struct intel_batchbuffer *batch,
+                                     drm_intel_context *context)
+{
+        int ret;
+        unsigned int used = flush_on_ring_common(batch, I915_EXEC_RENDER);
+
+        if (used == 0)
+                return;
+
+        ret = drm_intel_bo_subdata(batch->bo, 0, used, batch->buffer);
+        assert(ret == 0);
+
+        batch->ptr = NULL;
+
+        ret = drm_intel_gem_bo_context_exec(batch->bo, context, used,
+                                            I915_EXEC_RENDER);
+        assert(ret == 0);
+
+        intel_batchbuffer_reset(batch);
+}
+
 void
 intel_batchbuffer_flush(struct intel_batchbuffer *batch)
 {

lib/intel_batchbuffer.h:

@@ -25,6 +25,8 @@ void intel_batchbuffer_free(struct intel_batchbuffer *batch);
 
 void intel_batchbuffer_flush(struct intel_batchbuffer *batch);
 void intel_batchbuffer_flush_on_ring(struct intel_batchbuffer *batch, int ring);
+void intel_batchbuffer_flush_with_context(struct intel_batchbuffer *batch,
+                                          drm_intel_context *context);
 
 void intel_batchbuffer_reset(struct intel_batchbuffer *batch);
 
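One practical note on the new wrapper: intel_batchbuffer_flush_with_context() asserts that drm_intel_bo_subdata() and drm_intel_gem_bo_context_exec() succeed, so on a kernel or libdrm without context support the test simply aborts. A test that prefers to skip cleanly can probe for support up front. A hedged sketch, assuming bufmgr and batch are set up as in the earlier example and relying on drm_intel_gem_context_create() returning NULL on failure (exit code 77 is the automake "skip" convention):

        drm_intel_context *ctx = drm_intel_gem_context_create(bufmgr);
        if (!ctx) {
                fprintf(stderr, "hw contexts not supported, skipping\n");
                return 77;      /* automake: test skipped */
        }

        intel_batchbuffer_flush_with_context(batch, ctx);
        drm_intel_gem_context_destroy(ctx);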