mirror of
https://github.com/X11Libre/xserver.git
synced 2026-03-24 03:44:06 +00:00
glx: Compute the GLX extension string from __glXScreenInit
Now that the enable bits are in the screen base class we can compute this in one place, rather than making every backend do it. Reviewed-by: Eric Anholt <eric@anholt.net> Reviewed-by: Emil Velikov <emil.velikov@collabora.com> Signed-off-by: Adam Jackson <ajax@redhat.com>
This commit is contained in:
@@ -383,6 +383,14 @@ __glXScreenInit(__GLXscreen * pGlxScreen, ScreenPtr pScreen)
     }
 
     dixSetPrivate(&pScreen->devPrivates, glxScreenPrivateKey, pGlxScreen);
 
+    i = __glXGetExtensionString(pGlxScreen->glx_enable_bits, NULL);
+    if (i > 0) {
+        pGlxScreen->GLXextensions = xnfalloc(i);
+        (void) __glXGetExtensionString(pGlxScreen->glx_enable_bits,
+                                       pGlxScreen->GLXextensions);
+    }
+
 }
 
 void
Reference in New Issue
Block a user