sna/gen6: Reduce dst readbacks for unsupported sources

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
This commit is contained in:
Chris Wilson 2011-12-08 12:35:54 +00:00
parent bc68211d18
commit 440ac68ec0
1 changed file with 109 additions and 0 deletions

View File

@ -2081,6 +2081,109 @@ try_blt(struct sna *sna, int width, int height)
return FALSE;
}
/* A picture is "solid" if it is a single repeated 1x1 pixel.
 * Callers must only pass pictures backed by a drawable.
 */
static bool
is_solid(PicturePtr picture)
{
	if (!picture->repeat)
		return false;

	return picture->pDrawable->width == 1 &&
		picture->pDrawable->height == 1;
}
/* A gradient is any source-only picture (no backing drawable)
 * that is not a plain solid fill.
 */
static bool
is_gradient(PicturePtr picture)
{
	/* Drawable-backed pictures are never gradients. */
	if (picture->pDrawable != NULL)
		return false;

	return picture->pSourcePict->type != SourcePictTypeSolidFill;
}
/* Returns true if this source picture cannot be sampled by the
 * gen6 hardware (gradient, unsupported filter, or unsupported
 * repeat mode) and so forces a software fallback.
 */
static bool
source_fallback(PicturePtr p)
{
	if (is_gradient(p))
		return true;

	if (!gen6_check_filter(p))
		return true;

	return !gen6_check_repeat(p);
}
/* Decide whether a composite operation should be performed on the CPU
 * (return TRUE) or is worth attempting on the GPU (return FALSE).
 *
 * The heuristic: fall back if the destination format is unsupported, if
 * the destination doubles as an unsupported source/mask (which would
 * force a readback), or if nothing is resident on the GPU and a source
 * needs CPU rendering anyway.
 */
static bool
gen6_composite_fallback(struct sna *sna,
			PicturePtr src,
			PicturePtr mask,
			PicturePtr dst)
{
	struct sna_pixmap *priv;
	PixmapPtr src_pixmap;
	PixmapPtr mask_pixmap;
	PixmapPtr dst_pixmap;
	bool src_fallback, mask_fallback;

	if (!gen6_check_dst_format(dst->format)) {
		DBG(("%s: unknown destination format: %d\n",
		     __FUNCTION__, dst->format));
		return TRUE;
	}

	dst_pixmap = get_drawable_pixmap(dst->pDrawable);
	src_pixmap = src->pDrawable ? get_drawable_pixmap(src->pDrawable) : NULL;
	mask_pixmap = (mask && mask->pDrawable) ? get_drawable_pixmap(mask->pDrawable) : NULL;

	/* Evaluate the fallback predicates once up front rather than twice
	 * along each path below.
	 * NOTE(review): this assumes source_fallback() is a pure check with
	 * no side effects — it only inspects the picture state, see
	 * is_gradient()/gen6_check_filter()/gen6_check_repeat().
	 */
	src_fallback = source_fallback(src);
	mask_fallback = mask ? source_fallback(mask) : false;

	/* If we are using the destination as a source and need to
	 * readback in order to upload the source, do it all
	 * on the cpu.
	 */
	if (src_pixmap == dst_pixmap && src_fallback) {
		DBG(("%s: src is dst and will fallback\n",__FUNCTION__));
		return TRUE;
	}
	if (mask_pixmap == dst_pixmap && mask_fallback) {
		DBG(("%s: mask is dst and will fallback\n",__FUNCTION__));
		return TRUE;
	}

	/* If anything is on the GPU, push everything out to the GPU */
	priv = sna_pixmap(dst_pixmap);
	if (priv && priv->gpu_damage) {
		DBG(("%s: dst is already on the GPU, try to use GPU\n",
		     __FUNCTION__));
		return FALSE;
	}

	if (src_pixmap && !is_solid(src) && !src_fallback) {
		priv = sna_pixmap(src_pixmap);
		if (priv && priv->gpu_damage) {
			DBG(("%s: src is already on the GPU, try to use GPU\n",
			     __FUNCTION__));
			return FALSE;
		}
	}
	if (mask_pixmap && !is_solid(mask) && !mask_fallback) {
		priv = sna_pixmap(mask_pixmap);
		if (priv && priv->gpu_damage) {
			DBG(("%s: mask is already on the GPU, try to use GPU\n",
			     __FUNCTION__));
			return FALSE;
		}
	}

	/* However if the dst is not on the GPU and we need to
	 * render one of the sources using the CPU, we may
	 * as well do the entire operation in place on the CPU.
	 */
	if (src_fallback) {
		DBG(("%s: dst is on the CPU and src will fallback\n",
		     __FUNCTION__));
		return TRUE;
	}
	if (mask_fallback) {
		DBG(("%s: dst is on the CPU and mask will fallback\n",
		     __FUNCTION__));
		return TRUE;
	}

	DBG(("%s: dst is not on the GPU and the operation should not fallback\n",
	     __FUNCTION__));
	return FALSE;
}
static Bool
gen6_render_composite(struct sna *sna,
uint8_t op,
@ -2119,6 +2222,9 @@ gen6_render_composite(struct sna *sna,
width, height, tmp))
return TRUE;
if (gen6_composite_fallback(sna, src, mask, dst))
return FALSE;
if (need_tiling(sna, width, height))
return sna_tiling_composite(op, src, mask, dst,
src_x, src_y,
@ -2482,6 +2588,9 @@ gen6_render_composite_spans(struct sna *sna,
if (need_tiling(sna, width, height))
return FALSE;
if (gen6_composite_fallback(sna, src, NULL, dst))
return FALSE;
tmp->base.op = op;
if (!gen6_composite_set_target(&tmp->base, dst))
return FALSE;