gma500: support 1080p

The problem in console mode is a lack of linear memory: a 1080p
framebuffer at 32bpp can exceed the stolen memory available. We can
solve that by dropping to 16bpp. The modesetting X server will allocate
its own GEM framebuffer in 32bpp and all will be well.
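
To put numbers on it (a standalone sketch; the 4MiB stolen size is an
assumed BIOS figure, real systems report it as vram_stolen_size), the
fit arithmetic works out as:

  /* Illustrative only: sizes a 1920x1080 framebuffer at 32bpp and
   * 16bpp against a hypothetical 4MiB of BIOS-reserved stolen memory.
   */
  #include <stdio.h>

  #define ALIGN(x, a)  (((x) + (a) - 1) & ~((a) - 1))

  int main(void)
  {
          unsigned int stolen = 4 * 1024 * 1024;  /* assumed figure */
          unsigned int fb32 = ALIGN(1920 * 4, 64) * 1080;
          unsigned int fb16 = ALIGN(1920 * 2, 64) * 1080;

          printf("32bpp: %u bytes - %s\n", fb32,
                 fb32 > stolen ? "does not fit, drop to 16bpp" : "fits");
          printf("16bpp: %u bytes - %s\n", fb16,
                 fb16 > stolen ? "does not fit" : "fits");
          return 0;
  }

At 32bpp the buffer needs ALIGN(1920 * 4, 64) * 1080 = 8294400 bytes,
roughly twice what such a system has, while 16bpp needs 4147200 bytes
and just fits.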

We could just use 16bpp unconditionally, but that would be a regression
at the lower modes, as many distributions don't yet ship the generic
modesetting KMS drivers.

Signed-off-by: Alan Cox <alan@linux.intel.com>
Signed-off-by: Dave Airlie <airlied@redhat.com>
diff --git a/drivers/gpu/drm/gma500/framebuffer.c b/drivers/gpu/drm/gma500/framebuffer.c
index 8ea202f..c2cf6bf 100644
--- a/drivers/gpu/drm/gma500/framebuffer.c
+++ b/drivers/gpu/drm/gma500/framebuffer.c
@@ -543,9 +543,25 @@
 				struct drm_fb_helper_surface_size *sizes)
 {
 	struct psb_fbdev *psb_fbdev = (struct psb_fbdev *)helper;
+	struct drm_device *dev = psb_fbdev->psb_fb_helper.dev;
+	struct drm_psb_private *dev_priv = dev->dev_private;
 	int new_fb = 0;
+	int bytespp;
 	int ret;
 
+	bytespp = sizes->surface_bpp / 8;
+	if (bytespp == 3)	/* no packed 24bit framebuffer support */
+		bytespp = 4;
+
+	/* If the mode will not fit in stolen memory then switch to 16bpp
+	   to get a console at full resolution. The X modesetting server
+	   will allocate its own 32bpp GEM framebuffer */
+	if (ALIGN(sizes->fb_width * bytespp, 64) * sizes->fb_height >
+			dev_priv->vram_stolen_size) {
+		sizes->surface_bpp = 16;
+		sizes->surface_depth = 16;
+	}
+
 	if (!helper->fb) {
 		ret = psbfb_create(psb_fbdev, sizes);
 		if (ret)