author     aliguori <aliguori@c046a42c-6fe2-441c-8c8c-71466251a162>  2009-03-13 15:02:13 +0000
committer  aliguori <aliguori@c046a42c-6fe2-441c-8c8c-71466251a162>  2009-03-13 15:02:13 +0000
commit     7b5d76dae31881d3cdb6f748ad4e84ddd7b66f3e (patch)
tree       f001d6daf0d45f1a262ce900dedfab8a0594acfc /console.c
parent     86dbdd4012427fe6b39664d5153abab05579a2f2 (diff)
DisplayAllocator interface (Stefano Stabellini)
Hi all,
this patch adds a DisplayAllocator interface that allows display frontends
(sdl in particular) to provide a preallocated display buffer for the
graphical backend to use.

Whenever a graphical backend cannot use qemu_create_displaysurface_from
because its own internal pixel format cannot be exported directly (text
mode, or graphical mode with color depth 8 or 24), it creates another
display buffer in memory using qemu_create_displaysurface and does the
conversion. This new buffer then needs to be blitted into the sdl surface
buffer every time we update portions of the screen.

We can avoid this using the DisplayAllocator interface: sdl provides its
own implementation of qemu_create_displaysurface, giving back the sdl
surface buffer directly (as we used to do before the DisplayState
changes). Since the buffer returned by sdl could be in bgr format, we
need to put back the handlers for that case.

This approach is good if the two following conditions are true:

1) the sdl surface is a software surface that resides in main memory;

2) the host display color depth is either 16 or 32 bpp.

If the first condition is false, we can get bad performance when using
sdl and vnc together. If the second condition is false, performance is
certainly not going to improve, but it shouldn't get worse either.

The first condition is always true, at least on linux/X11 systems, and I
believe it is true on other platforms as well. The second condition holds
in the vast majority of cases.

This patch should also have the good side effect of solving the sdl 2D
slowness malc was reporting on MacOS, because SDL_BlitSurface is no
longer called when the guest is in text mode or at 24 bpp. However, the
root problem is still present, so I suspect we may still see some
slowness on MacOS when the guest is at 32 or 16 bpp.

Signed-off-by: Stefano Stabellini <stefano.stabellini@eu.citrix.com>
Signed-off-by: Anthony Liguori <aliguori@us.ibm.com>

git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@6839 c046a42c-6fe2-441c-8c8c-71466251a162
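For context, here is a minimal sketch of what such an interface might look
like, reconstructed from the call sites visible in the diff below
(qemu_create_displaysurface(ds, ...), ds->allocator, and the
defaultallocator_* names); the exact committed definitions may differ:

    /* Sketch of the DisplayAllocator interface: three hooks to create,
     * resize and free a DisplaySurface. */
    typedef struct DisplaySurface DisplaySurface;

    typedef struct DisplayAllocator {
        DisplaySurface *(*create_displaysurface)(int width, int height);
        DisplaySurface *(*resize_displaysurface)(DisplaySurface *surface,
                                                 int width, int height);
        void (*free_displaysurface)(DisplaySurface *surface);
    } DisplayAllocator;

    /* Minimal view of DisplayState for this sketch; the real struct has
     * many more members. */
    typedef struct DisplayState {
        DisplaySurface *surface;
        DisplayAllocator *allocator;
    } DisplayState;

    /* The generic entry points dispatch through ds->allocator, so
     * whichever frontend registered an allocator supplies the buffer. */
    static inline DisplaySurface *
    qemu_create_displaysurface(DisplayState *ds, int width, int height)
    {
        return ds->allocator->create_displaysurface(width, height);
    }

    static inline DisplaySurface *
    qemu_resize_displaysurface(DisplayState *ds, int width, int height)
    {
        return ds->allocator->resize_displaysurface(ds->surface,
                                                    width, height);
    }

    static inline void qemu_free_displaysurface(DisplayState *ds)
    {
        ds->allocator->free_displaysurface(ds->surface);
    }

This matches the signature changes in the hunks below, where the callers
now pass the DisplayState rather than sizing parameters.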
Diffstat (limited to 'console.c')
-rw-r--r--  console.c  26
1 file changed, 13 insertions, 13 deletions
diff --git a/console.c b/console.c
index 9aeac8c969..e37397d2a0 100644
--- a/console.c
+++ b/console.c
@@ -1068,8 +1068,7 @@ void console_select(unsigned int index)
DisplayState *ds = s->ds;
active_console = s;
if (ds_get_bits_per_pixel(s->ds)) {
- ds->surface = qemu_resize_displaysurface(ds->surface, s->g_width,
- s->g_height, 32, 4 * s->g_width);
+ ds->surface = qemu_resize_displaysurface(ds, s->g_width, s->g_height);
} else {
s->ds->surface->width = s->width;
s->ds->surface->height = s->height;
@@ -1277,11 +1276,12 @@ DisplayState *graphic_console_init(vga_hw_update_ptr update,
DisplayState *ds;
ds = (DisplayState *) qemu_mallocz(sizeof(DisplayState));
- ds->surface = qemu_create_displaysurface(640, 480, 32, 640 * 4);
+ ds->allocator = &default_allocator;
+ ds->surface = qemu_create_displaysurface(ds, 640, 480);
s = new_console(ds, GRAPHIC_CONSOLE);
if (s == NULL) {
- qemu_free_displaysurface(ds->surface);
+ qemu_free_displaysurface(ds);
qemu_free(ds);
return NULL;
}
@@ -1429,7 +1429,7 @@ void qemu_console_resize(DisplayState *ds, int width, int height)
s->g_width = width;
s->g_height = height;
if (is_graphic_console()) {
- ds->surface = qemu_resize_displaysurface(ds->surface, width, height, 32, 4 * width);
+ ds->surface = qemu_resize_displaysurface(ds, width, height);
dpy_resize(ds);
}
}
@@ -1552,14 +1552,14 @@ PixelFormat qemu_default_pixelformat(int bpp)
return pf;
}
-DisplaySurface* qemu_create_displaysurface(int width, int height, int bpp, int linesize)
+DisplaySurface* defaultallocator_create_displaysurface(int width, int height)
{
DisplaySurface *surface = (DisplaySurface*) qemu_mallocz(sizeof(DisplaySurface));
surface->width = width;
surface->height = height;
- surface->linesize = linesize;
- surface->pf = qemu_default_pixelformat(bpp);
+ surface->linesize = width * 4;
+ surface->pf = qemu_default_pixelformat(32);
#ifdef WORDS_BIGENDIAN
surface->flags = QEMU_ALLOCATED_FLAG | QEMU_BIG_ENDIAN_FLAG;
#else
@@ -1570,13 +1570,13 @@ DisplaySurface* qemu_create_displaysurface(int width, int height, int bpp, int l
return surface;
}
-DisplaySurface* qemu_resize_displaysurface(DisplaySurface *surface,
- int width, int height, int bpp, int linesize)
+DisplaySurface* defaultallocator_resize_displaysurface(DisplaySurface *surface,
+ int width, int height)
{
surface->width = width;
surface->height = height;
- surface->linesize = linesize;
- surface->pf = qemu_default_pixelformat(bpp);
+ surface->linesize = width * 4;
+ surface->pf = qemu_default_pixelformat(32);
if (surface->flags & QEMU_ALLOCATED_FLAG)
surface->data = (uint8_t*) qemu_realloc(surface->data, surface->linesize * surface->height);
else
@@ -1607,7 +1607,7 @@ DisplaySurface* qemu_create_displaysurface_from(int width, int height, int bpp,
return surface;
}
-void qemu_free_displaysurface(DisplaySurface *surface)
+void defaultallocator_free_displaysurface(DisplaySurface *surface)
{
if (surface == NULL)
return;
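A frontend opts in by installing its own allocator in place of the default
one referenced above (&default_allocator). The sketch below shows how that
wiring might look; the helper register_displayallocator() and the sdl_*
callbacks are illustrative assumptions, not part of this diff:

    /* The default allocator simply points at the malloc-based
     * implementations renamed in this patch. */
    static DisplayAllocator default_allocator = {
        .create_displaysurface = defaultallocator_create_displaysurface,
        .resize_displaysurface = defaultallocator_resize_displaysurface,
        .free_displaysurface   = defaultallocator_free_displaysurface,
    };

    /* Hypothetical registration helper: only the stock allocator may be
     * replaced, so the first frontend to register wins. */
    DisplayAllocator *register_displayallocator(DisplayState *ds,
                                                DisplayAllocator *da)
    {
        if (ds->allocator == &default_allocator) {
            ds->allocator = da;
        }
        return ds->allocator;
    }

    /* sdl-side callbacks, assumed to hand back the sdl surface buffer
     * directly instead of a separately malloc'd one. */
    static DisplaySurface *sdl_create_displaysurface(int width, int height);
    static DisplaySurface *sdl_resize_displaysurface(DisplaySurface *surface,
                                                     int width, int height);
    static void sdl_free_displaysurface(DisplaySurface *surface);

    static DisplayAllocator sdl_allocator = {
        .create_displaysurface = sdl_create_displaysurface,
        .resize_displaysurface = sdl_resize_displaysurface,
        .free_displaysurface   = sdl_free_displaysurface,
    };

    void sdl_display_init(DisplayState *ds)
    {
        /* Backends now draw straight into the sdl surface buffer,
         * skipping the extra SDL_BlitSurface on every screen update. */
        register_displayallocator(ds, &sdl_allocator);
    }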