glx: Compute the GLX extension string from __glXScreenInit

Now that the enable bits are in the screen base class, we can compute
this in one place rather than making every backend do it.

Reviewed-by: Eric Anholt <eric@anholt.net>
Reviewed-by: Emil Velikov <emil.velikov@collabora.com>
Signed-off-by: Adam Jackson <ajax@redhat.com>
This commit is contained in:
Adam Jackson
2016-03-23 15:41:24 -04:00
parent e21de4bf3c
commit 2e8781ead3
5 changed files with 8 additions and 49 deletions

View File

@@ -383,6 +383,14 @@ __glXScreenInit(__GLXscreen * pGlxScreen, ScreenPtr pScreen)
}
dixSetPrivate(&pScreen->devPrivates, glxScreenPrivateKey, pGlxScreen);
i = __glXGetExtensionString(pGlxScreen->glx_enable_bits, NULL);
if (i > 0) {
pGlxScreen->GLXextensions = xnfalloc(i);
(void) __glXGetExtensionString(pGlxScreen->glx_enable_bits,
pGlxScreen->GLXextensions);
}
}
void