sna/gen2+: Prefer not to fallback if the source is busy

If we try to perform the operation while there are outstanding operations on
the source pixmaps, we will stall waiting for them to complete.

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
This commit is contained in:
Chris Wilson 2012-03-07 15:52:41 +00:00
parent 4899740f6f
commit 49a80ce1ff
10 changed files with 310 additions and 170 deletions

View File

@ -1512,16 +1512,37 @@ need_upload(PicturePtr p)
}
static bool
source_fallback(PicturePtr p)
source_is_busy(PixmapPtr pixmap)
{
struct sna_pixmap *priv = sna_pixmap(pixmap);
if (priv == NULL)
return false;
if (priv->clear)
return false;
if (priv->gpu_bo && kgem_bo_is_busy(priv->gpu_bo))
return true;
if (priv->cpu_bo && kgem_bo_is_busy(priv->cpu_bo))
return true;
return priv->gpu_damage && !priv->cpu_damage;
}
static bool
source_fallback(PicturePtr p, PixmapPtr pixmap)
{
if (sna_picture_is_solid(p, NULL))
return false;
return (has_alphamap(p) ||
is_unhandled_gradient(p) ||
!gen2_check_filter(p) ||
!gen2_check_repeat(p) ||
need_upload(p));
if (is_unhandled_gradient(p) || !gen2_check_repeat(p))
return true;
if (pixmap && source_is_busy(pixmap))
return false;
return has_alphamap(p) || !gen2_check_filter(p) || need_upload(p);
}
static bool
@ -1534,6 +1555,7 @@ gen2_composite_fallback(struct sna *sna,
PixmapPtr src_pixmap;
PixmapPtr mask_pixmap;
PixmapPtr dst_pixmap;
bool src_fallback, mask_fallback;
if (!gen2_check_dst_format(dst->format)) {
DBG(("%s: unknown destination format: %d\n",
@ -1542,18 +1564,27 @@ gen2_composite_fallback(struct sna *sna,
}
dst_pixmap = get_drawable_pixmap(dst->pDrawable);
src_pixmap = src->pDrawable ? get_drawable_pixmap(src->pDrawable) : NULL;
mask_pixmap = (mask && mask->pDrawable) ? get_drawable_pixmap(mask->pDrawable) : NULL;
src_fallback = source_fallback(src, src_pixmap);
if (mask) {
mask_pixmap = mask->pDrawable ? get_drawable_pixmap(mask->pDrawable) : NULL;
mask_fallback = source_fallback(mask, mask_pixmap);
} else {
mask_pixmap = NULL;
mask_fallback = false;
}
/* If we are using the destination as a source and need to
* readback in order to upload the source, do it all
* on the cpu.
*/
if (src_pixmap == dst_pixmap && source_fallback(src)) {
if (src_pixmap == dst_pixmap && src_fallback) {
DBG(("%s: src is dst and will fallback\n",__FUNCTION__));
return TRUE;
}
if (mask_pixmap == dst_pixmap && source_fallback(mask)) {
if (mask_pixmap == dst_pixmap && mask_fallback) {
DBG(("%s: mask is dst and will fallback\n",__FUNCTION__));
return TRUE;
}
@ -1566,34 +1597,28 @@ gen2_composite_fallback(struct sna *sna,
return FALSE;
}
if (src_pixmap && !source_fallback(src)) {
priv = sna_pixmap(src_pixmap);
if (priv && priv->gpu_damage && !priv->cpu_damage) {
DBG(("%s: src is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (src_pixmap && !src_fallback) {
DBG(("%s: src is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (mask_pixmap && !source_fallback(mask)) {
priv = sna_pixmap(mask_pixmap);
if (priv && priv->gpu_damage && !priv->cpu_damage) {
DBG(("%s: mask is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (mask_pixmap && !mask_fallback) {
DBG(("%s: mask is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
/* However if the dst is not on the GPU and we need to
* render one of the sources using the CPU, we may
* as well do the entire operation in place on the CPU.
*/
if (source_fallback(src)) {
if (src_fallback) {
DBG(("%s: dst is on the CPU and src will fallback\n",
__FUNCTION__));
return TRUE;
}
if (mask && source_fallback(mask)) {
if (mask && mask_fallback) {
DBG(("%s: dst is on the CPU and mask will fallback\n",
__FUNCTION__));
return TRUE;

View File

@ -2511,16 +2511,37 @@ need_upload(PicturePtr p)
}
static bool
source_fallback(PicturePtr p)
source_is_busy(PixmapPtr pixmap)
{
struct sna_pixmap *priv = sna_pixmap(pixmap);
if (priv == NULL)
return false;
if (priv->clear)
return false;
if (priv->gpu_bo && kgem_bo_is_busy(priv->gpu_bo))
return true;
if (priv->cpu_bo && kgem_bo_is_busy(priv->cpu_bo))
return true;
return priv->gpu_damage && !priv->cpu_damage;
}
static bool
source_fallback(PicturePtr p, PixmapPtr pixmap)
{
if (sna_picture_is_solid(p, NULL))
return false;
return (has_alphamap(p) ||
!gen3_check_xformat(p) ||
!gen3_check_filter(p) ||
!gen3_check_repeat(p) ||
need_upload(p));
if (!gen3_check_xformat(p) || !gen3_check_repeat(p))
return true;
if (pixmap && source_is_busy(pixmap))
return false;
return has_alphamap(p) || !gen3_check_filter(p) || need_upload(p);
}
static bool
@ -2534,6 +2555,7 @@ gen3_composite_fallback(struct sna *sna,
PixmapPtr src_pixmap;
PixmapPtr mask_pixmap;
PixmapPtr dst_pixmap;
bool src_fallback, mask_fallback;
if (!gen3_check_dst_format(dst->format)) {
DBG(("%s: unknown destination format: %d\n",
@ -2542,18 +2564,27 @@ gen3_composite_fallback(struct sna *sna,
}
dst_pixmap = get_drawable_pixmap(dst->pDrawable);
src_pixmap = src->pDrawable ? get_drawable_pixmap(src->pDrawable) : NULL;
mask_pixmap = (mask && mask->pDrawable) ? get_drawable_pixmap(mask->pDrawable) : NULL;
src_fallback = source_fallback(src, src_pixmap);
if (mask) {
mask_pixmap = mask->pDrawable ? get_drawable_pixmap(mask->pDrawable) : NULL;
mask_fallback = source_fallback(mask, mask_pixmap);
} else {
mask_pixmap = NULL;
mask_fallback = false;
}
/* If we are using the destination as a source and need to
* readback in order to upload the source, do it all
* on the cpu.
*/
if (src_pixmap == dst_pixmap && source_fallback(src)) {
if (src_pixmap == dst_pixmap && src_fallback) {
DBG(("%s: src is dst and will fallback\n",__FUNCTION__));
return TRUE;
}
if (mask_pixmap == dst_pixmap && source_fallback(mask)) {
if (mask_pixmap == dst_pixmap && mask_fallback) {
DBG(("%s: mask is dst and will fallback\n",__FUNCTION__));
return TRUE;
}
@ -2575,38 +2606,28 @@ gen3_composite_fallback(struct sna *sna,
return FALSE;
}
if (src_pixmap && !source_fallback(src)) {
priv = sna_pixmap(src_pixmap);
if (priv &&
((priv->gpu_damage && !priv->cpu_damage) ||
(priv->cpu_bo && priv->cpu_bo->domain != DOMAIN_CPU))) {
DBG(("%s: src is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (src_pixmap && !src_fallback) {
DBG(("%s: src is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (mask_pixmap && !source_fallback(mask)) {
priv = sna_pixmap(mask_pixmap);
if (priv &&
((priv->gpu_damage && !priv->cpu_damage) ||
(priv->cpu_bo && priv->cpu_bo->domain != DOMAIN_CPU))) {
DBG(("%s: mask is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (mask_pixmap && !mask_fallback) {
DBG(("%s: mask is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
/* However if the dst is not on the GPU and we need to
* render one of the sources using the CPU, we may
* as well do the entire operation in place on the CPU.
*/
if (source_fallback(src)) {
if (src_fallback) {
DBG(("%s: dst is on the CPU and src will fallback\n",
__FUNCTION__));
return TRUE;
}
if (mask && source_fallback(mask)) {
if (mask && mask_fallback) {
DBG(("%s: dst is on the CPU and mask will fallback\n",
__FUNCTION__));
return TRUE;

View File

@ -2122,11 +2122,8 @@ try_blt(struct sna *sna,
}
static bool
is_gradient(PicturePtr picture)
check_gradient(PicturePtr picture)
{
if (picture->pDrawable)
return FALSE;
switch (picture->pSourcePict->type) {
case SourcePictTypeSolidFill:
case SourcePictTypeLinear:
@ -2155,17 +2152,38 @@ need_upload(PicturePtr p)
}
static bool
source_fallback(PicturePtr p)
source_is_busy(PixmapPtr pixmap)
{
struct sna_pixmap *priv = sna_pixmap(pixmap);
if (priv == NULL)
return false;
if (priv->clear)
return false;
if (priv->gpu_bo && kgem_bo_is_busy(priv->gpu_bo))
return true;
return priv->gpu_damage && !priv->cpu_damage;
}
static bool
source_fallback(PicturePtr p, PixmapPtr pixmap)
{
if (sna_picture_is_solid(p, NULL))
return false;
return (has_alphamap(p) ||
is_gradient(p) ||
!gen4_check_filter(p) ||
!gen4_check_repeat(p) ||
!gen4_check_format(p->format) ||
need_upload(p));
if (p->pSourcePict)
return check_gradient(p);
if (!gen4_check_repeat(p) || !gen4_check_format(p->format))
return true;
/* soft errors: prefer to upload/compute rather than readback */
if (pixmap && source_is_busy(pixmap))
return false;
return has_alphamap(p) || !gen4_check_filter(p) || need_upload(p);
}
static bool
@ -2178,6 +2196,7 @@ gen4_composite_fallback(struct sna *sna,
PixmapPtr src_pixmap;
PixmapPtr mask_pixmap;
PixmapPtr dst_pixmap;
bool src_fallback, mask_fallback;
if (!gen4_check_dst_format(dst->format)) {
DBG(("%s: unknown destination format: %d\n",
@ -2186,18 +2205,27 @@ gen4_composite_fallback(struct sna *sna,
}
dst_pixmap = get_drawable_pixmap(dst->pDrawable);
src_pixmap = src->pDrawable ? get_drawable_pixmap(src->pDrawable) : NULL;
mask_pixmap = (mask && mask->pDrawable) ? get_drawable_pixmap(mask->pDrawable) : NULL;
src_fallback = source_fallback(src, src_pixmap);
if (mask) {
mask_pixmap = mask->pDrawable ? get_drawable_pixmap(mask->pDrawable) : NULL;
mask_fallback = source_fallback(mask, mask_pixmap);
} else {
mask_pixmap = NULL;
mask_fallback = false;
}
/* If we are using the destination as a source and need to
* readback in order to upload the source, do it all
* on the cpu.
*/
if (src_pixmap == dst_pixmap && source_fallback(src)) {
if (src_pixmap == dst_pixmap && src_fallback) {
DBG(("%s: src is dst and will fallback\n",__FUNCTION__));
return TRUE;
}
if (mask_pixmap == dst_pixmap && source_fallback(mask)) {
if (mask_pixmap == dst_pixmap && mask_fallback) {
DBG(("%s: mask is dst and will fallback\n",__FUNCTION__));
return TRUE;
}
@ -2210,34 +2238,28 @@ gen4_composite_fallback(struct sna *sna,
return FALSE;
}
if (src_pixmap && !source_fallback(src)) {
priv = sna_pixmap(src_pixmap);
if (priv && priv->gpu_damage && !priv->cpu_damage) {
DBG(("%s: src is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (!src_fallback) {
DBG(("%s: src is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (mask_pixmap && !source_fallback(mask)) {
priv = sna_pixmap(mask_pixmap);
if (priv && priv->gpu_damage && !priv->cpu_damage) {
DBG(("%s: mask is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (mask && !mask_fallback) {
DBG(("%s: mask is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
/* However if the dst is not on the GPU and we need to
* render one of the sources using the CPU, we may
* as well do the entire operation in place on the CPU.
*/
if (source_fallback(src)) {
if (src_fallback) {
DBG(("%s: dst is on the CPU and src will fallback\n",
__FUNCTION__));
return TRUE;
}
if (mask && source_fallback(mask)) {
if (mask && mask_fallback) {
DBG(("%s: dst is on the CPU and mask will fallback\n",
__FUNCTION__));
return TRUE;

View File

@ -2198,17 +2198,39 @@ need_upload(PicturePtr p)
}
static bool
source_fallback(PicturePtr p)
source_is_busy(PixmapPtr pixmap)
{
struct sna_pixmap *priv = sna_pixmap(pixmap);
if (priv == NULL)
return false;
if (priv->clear)
return false;
if (priv->gpu_bo && kgem_bo_is_busy(priv->gpu_bo))
return true;
if (priv->cpu_bo && kgem_bo_is_busy(priv->cpu_bo))
return true;
return priv->gpu_damage && !priv->cpu_damage;
}
static bool
source_fallback(PicturePtr p, PixmapPtr pixmap)
{
if (sna_picture_is_solid(p, NULL))
return false;
return (has_alphamap(p) ||
is_gradient(p) ||
!gen5_check_filter(p) ||
!gen5_check_repeat(p) ||
!gen5_check_format(p->format) ||
need_upload(p));
if (is_gradient(p) ||
!gen5_check_repeat(p) ||
!gen5_check_format(p->format))
return true;
if (pixmap && source_is_busy(pixmap))
return false;
return has_alphamap(p) || !gen5_check_filter(p) || need_upload(p);
}
static bool
@ -2221,6 +2243,7 @@ gen5_composite_fallback(struct sna *sna,
PixmapPtr src_pixmap;
PixmapPtr mask_pixmap;
PixmapPtr dst_pixmap;
bool src_fallback, mask_fallback;
if (!gen5_check_dst_format(dst->format)) {
DBG(("%s: unknown destination format: %d\n",
@ -2229,18 +2252,27 @@ gen5_composite_fallback(struct sna *sna,
}
dst_pixmap = get_drawable_pixmap(dst->pDrawable);
src_pixmap = src->pDrawable ? get_drawable_pixmap(src->pDrawable) : NULL;
mask_pixmap = (mask && mask->pDrawable) ? get_drawable_pixmap(mask->pDrawable) : NULL;
src_fallback = source_fallback(src, src_pixmap);
if (mask) {
mask_pixmap = mask->pDrawable ? get_drawable_pixmap(mask->pDrawable) : NULL;
mask_fallback = source_fallback(mask, mask_pixmap);
} else {
mask_pixmap = NULL;
mask_fallback = false;
}
/* If we are using the destination as a source and need to
* readback in order to upload the source, do it all
* on the cpu.
*/
if (src_pixmap == dst_pixmap && source_fallback(src)) {
if (src_pixmap == dst_pixmap && src_fallback) {
DBG(("%s: src is dst and will fallback\n",__FUNCTION__));
return TRUE;
}
if (mask_pixmap == dst_pixmap && source_fallback(mask)) {
if (mask_pixmap == dst_pixmap && mask_fallback) {
DBG(("%s: mask is dst and will fallback\n",__FUNCTION__));
return TRUE;
}
@ -2253,34 +2285,28 @@ gen5_composite_fallback(struct sna *sna,
return FALSE;
}
if (src_pixmap && !source_fallback(src)) {
priv = sna_pixmap(src_pixmap);
if (priv && priv->gpu_damage && !priv->cpu_damage) {
DBG(("%s: src is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (src_pixmap && !src_fallback) {
DBG(("%s: src is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (mask_pixmap && !source_fallback(mask)) {
priv = sna_pixmap(mask_pixmap);
if (priv && priv->gpu_damage && !priv->cpu_damage) {
DBG(("%s: mask is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (mask_pixmap && !mask_fallback) {
DBG(("%s: mask is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
/* However if the dst is not on the GPU and we need to
* render one of the sources using the CPU, we may
* as well do the entire operation in place on the CPU.
*/
if (source_fallback(src)) {
if (src_fallback) {
DBG(("%s: dst is on the CPU and src will fallback\n",
__FUNCTION__));
return TRUE;
}
if (mask && source_fallback(mask)) {
if (mask && mask_fallback) {
DBG(("%s: dst is on the CPU and mask will fallback\n",
__FUNCTION__));
return TRUE;

View File

@ -2374,7 +2374,7 @@ try_blt(struct sna *sna,
}
static bool
is_gradient(PicturePtr picture)
check_gradient(PicturePtr picture)
{
if (picture->pDrawable)
return FALSE;
@ -2407,17 +2407,37 @@ need_upload(PicturePtr p)
}
static bool
source_fallback(PicturePtr p)
source_is_busy(PixmapPtr pixmap)
{
struct sna_pixmap *priv = sna_pixmap(pixmap);
if (priv == NULL || priv->clear)
return false;
if (priv->gpu_bo && kgem_bo_is_busy(priv->gpu_bo))
return true;
if (priv->cpu_bo && kgem_bo_is_busy(priv->cpu_bo))
return true;
return priv->gpu_damage && !priv->cpu_damage;
}
static bool
source_fallback(PicturePtr p, PixmapPtr pixmap)
{
if (sna_picture_is_solid(p, NULL))
return false;
return (has_alphamap(p) ||
is_gradient(p) ||
!gen6_check_filter(p) ||
!gen6_check_repeat(p) ||
!gen6_check_format(p->format) ||
need_upload(p));
if (p->pSourcePict)
return check_gradient(p);
if (!gen6_check_repeat(p) || !gen6_check_format(p->format))
return true;
if (pixmap && source_is_busy(pixmap))
return false;
return has_alphamap(p) || !gen6_check_filter(p) || need_upload(p);
}
static bool
@ -2430,6 +2450,7 @@ gen6_composite_fallback(struct sna *sna,
PixmapPtr src_pixmap;
PixmapPtr mask_pixmap;
PixmapPtr dst_pixmap;
bool src_fallback, mask_fallback;
if (!gen6_check_dst_format(dst->format)) {
DBG(("%s: unknown destination format: %d\n",
@ -2438,18 +2459,27 @@ gen6_composite_fallback(struct sna *sna,
}
dst_pixmap = get_drawable_pixmap(dst->pDrawable);
src_pixmap = src->pDrawable ? get_drawable_pixmap(src->pDrawable) : NULL;
mask_pixmap = (mask && mask->pDrawable) ? get_drawable_pixmap(mask->pDrawable) : NULL;
src_fallback = source_fallback(src, src_pixmap);
if (mask) {
mask_pixmap = mask->pDrawable ? get_drawable_pixmap(mask->pDrawable) : NULL;
mask_fallback = source_fallback(mask, mask_pixmap);
} else {
mask_pixmap = NULL;
mask_fallback = false;
}
/* If we are using the destination as a source and need to
* readback in order to upload the source, do it all
* on the cpu.
*/
if (src_pixmap == dst_pixmap && source_fallback(src)) {
if (src_pixmap == dst_pixmap && src_fallback) {
DBG(("%s: src is dst and will fallback\n",__FUNCTION__));
return TRUE;
}
if (mask_pixmap == dst_pixmap && source_fallback(mask)) {
if (mask_pixmap == dst_pixmap && mask_fallback) {
DBG(("%s: mask is dst and will fallback\n",__FUNCTION__));
return TRUE;
}
@ -2464,34 +2494,28 @@ gen6_composite_fallback(struct sna *sna,
return FALSE;
}
if (src_pixmap && !source_fallback(src)) {
priv = sna_pixmap(src_pixmap);
if (priv && priv->gpu_damage && !priv->cpu_damage) {
DBG(("%s: src is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (src_pixmap && !src_fallback) {
DBG(("%s: src is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (mask_pixmap && !source_fallback(mask)) {
priv = sna_pixmap(mask_pixmap);
if (priv && priv->gpu_damage && !priv->cpu_damage) {
DBG(("%s: mask is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (mask_pixmap && !mask_fallback) {
DBG(("%s: mask is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
/* However if the dst is not on the GPU and we need to
* render one of the sources using the CPU, we may
* as well do the entire operation in place on the CPU.
*/
if (source_fallback(src)) {
if (src_fallback) {
DBG(("%s: dst is on the CPU and src will fallback\n",
__FUNCTION__));
return TRUE;
}
if (mask && source_fallback(mask)) {
if (mask && mask_fallback) {
DBG(("%s: dst is on the CPU and mask will fallback\n",
__FUNCTION__));
return TRUE;

View File

@ -2487,17 +2487,36 @@ need_upload(PicturePtr p)
}
static bool
source_fallback(PicturePtr p)
source_is_busy(PixmapPtr pixmap)
{
struct sna_pixmap *priv = sna_pixmap(pixmap);
if (priv == NULL || priv->clear)
return false;
if (priv->gpu_bo && kgem_bo_is_busy(priv->gpu_bo))
return true;
if (priv->cpu_bo && kgem_bo_is_busy(priv->cpu_bo))
return true;
return priv->gpu_damage && !priv->cpu_damage;
}
static bool
source_fallback(PicturePtr p, PixmapPtr pixmap)
{
if (sna_picture_is_solid(p, NULL))
return false;
return (has_alphamap(p) ||
is_gradient(p) ||
!gen7_check_filter(p) ||
!gen7_check_repeat(p) ||
!gen7_check_format(p->format) ||
need_upload(p));
if (is_gradient(p) ||
!gen7_check_repeat(p) ||
!gen7_check_format(p->format))
return true;
if (pixmap && source_is_busy(pixmap))
return false;
return has_alphamap(p) || !gen7_check_filter(p) || need_upload(p);
}
static bool
@ -2510,6 +2529,7 @@ gen7_composite_fallback(struct sna *sna,
PixmapPtr src_pixmap;
PixmapPtr mask_pixmap;
PixmapPtr dst_pixmap;
bool src_fallback, mask_fallback;
if (!gen7_check_dst_format(dst->format)) {
DBG(("%s: unknown destination format: %d\n",
@ -2518,18 +2538,27 @@ gen7_composite_fallback(struct sna *sna,
}
dst_pixmap = get_drawable_pixmap(dst->pDrawable);
src_pixmap = src->pDrawable ? get_drawable_pixmap(src->pDrawable) : NULL;
mask_pixmap = (mask && mask->pDrawable) ? get_drawable_pixmap(mask->pDrawable) : NULL;
src_fallback = source_fallback(src, src_pixmap);
if (mask) {
mask_pixmap = mask->pDrawable ? get_drawable_pixmap(mask->pDrawable) : NULL;
mask_fallback = source_fallback(mask, mask_pixmap);
} else {
mask_pixmap = NULL;
mask_fallback = false;
}
/* If we are using the destination as a source and need to
* readback in order to upload the source, do it all
* on the cpu.
*/
if (src_pixmap == dst_pixmap && source_fallback(src)) {
if (src_pixmap == dst_pixmap && src_fallback) {
DBG(("%s: src is dst and will fallback\n",__FUNCTION__));
return TRUE;
}
if (mask_pixmap == dst_pixmap && source_fallback(mask)) {
if (mask_pixmap == dst_pixmap && mask_fallback) {
DBG(("%s: mask is dst and will fallback\n",__FUNCTION__));
return TRUE;
}
@ -2544,34 +2573,28 @@ gen7_composite_fallback(struct sna *sna,
return FALSE;
}
if (src_pixmap && !source_fallback(src)) {
priv = sna_pixmap(src_pixmap);
if (priv && priv->gpu_damage && !priv->cpu_damage) {
DBG(("%s: src is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (src_pixmap && !src_fallback) {
DBG(("%s: src is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (mask_pixmap && !source_fallback(mask)) {
priv = sna_pixmap(mask_pixmap);
if (priv && priv->gpu_damage && !priv->cpu_damage) {
DBG(("%s: mask is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
if (mask_pixmap && !mask_fallback) {
DBG(("%s: mask is already on the GPU, try to use GPU\n",
__FUNCTION__));
return FALSE;
}
/* However if the dst is not on the GPU and we need to
* render one of the sources using the CPU, we may
* as well do the entire operation in place on the CPU.
*/
if (source_fallback(src)) {
if (src_fallback) {
DBG(("%s: dst is on the CPU and src will fallback\n",
__FUNCTION__));
return TRUE;
}
if (mask && source_fallback(mask)) {
if (mask && mask_fallback) {
DBG(("%s: dst is on the CPU and mask will fallback\n",
__FUNCTION__));
return TRUE;

View File

@ -450,7 +450,6 @@ static inline bool kgem_bo_is_busy(struct kgem_bo *bo)
{
DBG_HDR(("%s: domain: %d exec? %d, rq? %d\n",
__FUNCTION__, bo->domain, bo->exec != NULL, bo->rq != NULL));
assert(bo->proxy == NULL);
return bo->rq;
}

View File

@ -871,10 +871,10 @@ _sna_pixmap_move_to_cpu(PixmapPtr pixmap, unsigned int flags)
return true;
}
DBG(("%s: gpu_bo=%d, gpu_damage=%p\n",
DBG(("%s: gpu_bo=%d, gpu_damage=%p, cpu_damage=%p, is-clear?=%d\n",
__FUNCTION__,
priv->gpu_bo ? priv->gpu_bo->handle : 0,
priv->gpu_damage));
priv->gpu_damage, priv->cpu_damage, priv->clear));
if ((flags & MOVE_READ) == 0) {
assert(flags & MOVE_WRITE);

View File

@ -1418,7 +1418,7 @@ sna_render_picture_fixup(struct sna *sna,
if (picture->alphaMap) {
DBG(("%s: alphamap\n", __FUNCTION__));
if ((is_gpu(picture->pDrawable) || is_gpu(picture->alphaMap->pDrawable))) {
if (is_gpu(picture->pDrawable) || is_gpu(picture->alphaMap->pDrawable)) {
return sna_render_picture_flatten(sna, picture, channel,
x, y, w, h, dst_x, dst_y);
}
@ -1428,7 +1428,7 @@ sna_render_picture_fixup(struct sna *sna,
if (picture->filter == PictFilterConvolution) {
DBG(("%s: convolution\n", __FUNCTION__));
if (picture->pDrawable && is_gpu(picture->pDrawable)) {
if (is_gpu(picture->pDrawable)) {
return sna_render_picture_convolve(sna, picture, channel,
x, y, w, h, dst_x, dst_y);
}

View File

@ -72,10 +72,10 @@ is_gpu(DrawablePtr drawable)
{
struct sna_pixmap *priv = sna_pixmap_from_drawable(drawable);
if (priv == NULL)
if (priv == NULL || priv->clear)
return false;
if (priv->gpu_damage)
if (priv->gpu_damage || (priv->gpu_bo && kgem_bo_is_busy(priv->gpu_bo)))
return true;
return priv->cpu_bo && kgem_bo_is_busy(priv->cpu_bo);