Diffstat (limited to 'drivers/gpu/drm/i915')
-rw-r--r--  drivers/gpu/drm/i915/i915_gem_request.c | 8 ++++----
-rw-r--r--  drivers/gpu/drm/i915/intel_ringbuffer.h | 5 -----
2 files changed, 4 insertions(+), 9 deletions(-)
diff --git a/drivers/gpu/drm/i915/i915_gem_request.c b/drivers/gpu/drm/i915/i915_gem_request.c
index f4e6c40ac4e3..606b0b8a5f91 100644
--- a/drivers/gpu/drm/i915/i915_gem_request.c
+++ b/drivers/gpu/drm/i915/i915_gem_request.c
@@ -419,7 +419,7 @@ void __i915_add_request(struct drm_i915_gem_request *request,
	 * should already have been reserved in the ring buffer. Let the ring
	 * know that it is time to use that space up.
	 */
-	request_start = intel_ring_get_tail(ring);
+	request_start = ring->tail;
 	reserved_tail = request->reserved_space;
 	request->reserved_space = 0;
 
@@ -464,19 +464,19 @@ void __i915_add_request(struct drm_i915_gem_request *request,
	 * GPU processing the request, we never over-estimate the
	 * position of the head.
	 */
-	request->postfix = intel_ring_get_tail(ring);
+	request->postfix = ring->tail;
 
 	if (i915.enable_execlists) {
 		ret = engine->emit_request(request);
 	} else {
 		ret = engine->add_request(request);
 
-		request->tail = intel_ring_get_tail(ring);
+		request->tail = ring->tail;
 	}
 	/* Not allowed to fail! */
 	WARN(ret, "emit|add_request failed: %d!\n", ret);
 	/* Sanity check that the reserved size was large enough. */
-	ret = intel_ring_get_tail(ring) - request_start;
+	ret = ring->tail - request_start;
 	if (ret < 0)
 		ret += ring->size;
 	WARN_ONCE(ret > reserved_tail,
diff --git a/drivers/gpu/drm/i915/intel_ringbuffer.h b/drivers/gpu/drm/i915/intel_ringbuffer.h
index 45ba29c0b20e..0c3c7185d9ad 100644
--- a/drivers/gpu/drm/i915/intel_ringbuffer.h
+++ b/drivers/gpu/drm/i915/intel_ringbuffer.h
@@ -490,11 +490,6 @@ static inline u32 intel_engine_get_seqno(struct intel_engine_cs *engine)
 
 int init_workarounds_ring(struct intel_engine_cs *engine);
 
-static inline u32 intel_ring_get_tail(struct intel_ring *ring)
-{
-	return ring->tail;
-}
-
 /*
  * Arbitrary size for largest possible 'add request' sequence. The code paths
  * are complex and variable. Empirical measurement shows that the worst case
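The sanity check at the end of __i915_add_request measures how many bytes the request actually emitted, handling the case where the ring tail wrapped back past the start. Below is a minimal standalone sketch of that wrap-around distance calculation; the struct ring and ring_used_since() names here are hypothetical stand-ins for illustration, not the driver's real types.

	#include <assert.h>
	#include <stdio.h>

	/* Hypothetical stand-in for the driver's ring: only the two fields
	 * the distance calculation needs. */
	struct ring {
		unsigned int size; /* total ring size in bytes */
		unsigned int tail; /* current write offset into the ring */
	};

	/* Bytes emitted between a recorded start offset and the current tail,
	 * mirroring the "if (ret < 0) ret += ring->size" handling in the diff. */
	static unsigned int ring_used_since(const struct ring *r, unsigned int start)
	{
		int used = (int)r->tail - (int)start;

		if (used < 0)		/* the tail wrapped past the end of the ring */
			used += r->size;
		return (unsigned int)used;
	}

	int main(void)
	{
		struct ring r = { .size = 4096, .tail = 128 };

		/* The request started near the end of the ring and wrapped. */
		unsigned int used = ring_used_since(&r, 4000);

		printf("bytes used: %u\n", used);	/* 4096 - 4000 + 128 = 224 */
		assert(used == 224);
		return 0;
	}

In the driver, this distance is compared against the space reserved up front (the WARN_ONCE(ret > reserved_tail, ...) in the hunk above), so an under-sized reservation is flagged at request emission time.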