macOS: Make retina OpenGL work on VMware
Qt requests full-resolution OpenGL surfaces by default. However, when running as a VMware guest, the OS appears to silently create low-resolution surfaces instead. This cannot be detected through the standard NSWindow APIs for converting to backing coordinates or for reading the backing scale factor. As a result, Qt incorrectly displays only one quarter of the window content.

Fall back to detecting whether the OpenGL renderer is the Apple software renderer, which it will be on VMware, and cancel the high-resolution surface request if so. This check needs to be done while we have a valid OpenGL context.

Task-number: QTBUG-62357
Change-Id: I33bf12b3bb0408249e6d66e0a8ca86b044bea781
Reviewed-by: Tor Arne Vestbø <tor.arne.vestbo@qt.io>
commit e739e984c3 (parent ae55e75b1b)
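For illustration, here is a minimal standalone sketch (not part of the patch; file name, build line, and the default pixel format are assumptions) of the fallback detection the message describes: create an OpenGL context, make it current, and read the renderer string.

// detect_renderer.mm — minimal sketch, not part of the patch. Assumed build:
//   clang++ detect_renderer.mm -fobjc-arc -framework AppKit -framework OpenGL
#import <AppKit/AppKit.h>
#import <OpenGL/gl.h>
#include <cstdio>
#include <cstring>

int main()
{
    @autoreleasepool {
        // An empty (zero-terminated) attribute list requests a default pixel
        // format; on a VMware guest this is expected to resolve to the
        // software renderer.
        NSOpenGLPixelFormatAttribute attrs[] = { 0 };
        NSOpenGLPixelFormat *format =
            [[NSOpenGLPixelFormat alloc] initWithAttributes:attrs];
        NSOpenGLContext *context =
            [[NSOpenGLContext alloc] initWithFormat:format shareContext:nil];

        // The renderer string is only valid while a context is current,
        // which is why the patch performs the check inside makeCurrent().
        [context makeCurrentContext];

        const GLubyte *renderer = glGetString(GL_RENDERER);
        if (renderer && std::strcmp(reinterpret_cast<const char *>(renderer),
                                    "Apple Software Renderer") == 0)
            std::puts("software renderer: cancel best-resolution surfaces");
        else
            std::printf("renderer: %s\n",
                        renderer ? reinterpret_cast<const char *>(renderer)
                                 : "(unavailable)");
    }
    return 0;
}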
@@ -84,6 +84,7 @@ private:
     NSOpenGLContext *m_shareContext;
     QSurfaceFormat m_format;
     QPointer<QWindow> m_currentWindow;
+    bool m_didCheckForSoftwareContext;
 };

 QT_END_NAMESPACE
@@ -121,7 +121,8 @@ QCocoaGLContext::QCocoaGLContext(const QSurfaceFormat &format, QPlatformOpenGLCo
                                  const QVariant &nativeHandle)
     : m_context(nil),
       m_shareContext(nil),
-      m_format(format)
+      m_format(format),
+      m_didCheckForSoftwareContext(false)
 {
     if (!nativeHandle.isNull()) {
         if (!nativeHandle.canConvert<QCocoaNativeContext>()) {
@@ -262,6 +263,22 @@ bool QCocoaGLContext::makeCurrent(QPlatformSurface *surface)

     QWindow *window = static_cast<QCocoaWindow *>(surface)->window();
     setActiveWindow(window);

+    // Disable high-resolution surfaces when using the software renderer, which has the
+    // problem that the system silently falls back to using a low-resolution buffer
+    // when a high-resolution buffer is requested. This is not detectable using the NSWindow
+    // convertSizeToBacking and backingScaleFactor APIs. A typical result of this is that Qt
+    // will display a quarter of the window content when running in a virtual machine.
+    if (!m_didCheckForSoftwareContext) {
+        m_didCheckForSoftwareContext = true;
+
+        const GLubyte* renderer = glGetString(GL_RENDERER);
+        if (qstrcmp((const char *)renderer, "Apple Software Renderer") == 0) {
+            NSView *view = static_cast<QCocoaWindow *>(surface)->m_view;
+            [view setWantsBestResolutionOpenGLSurface:NO];
+        }
+    }
+
     update();
     return true;
 }
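A hypothetical application-side variant of the same check (assumed, not part of the patch): application code cannot reach the plugin internals above, but it can run the equivalent test through public Qt API. As in the hunk, the renderer string is only meaningful after makeCurrent() succeeds.

// renderer_check.cpp — hypothetical application-side sketch (Qt 5 assumed).
#include <QGuiApplication>
#include <QOffscreenSurface>
#include <QOpenGLContext>
#include <QOpenGLFunctions>
#include <QtGlobal>
#include <cstdio>

int main(int argc, char *argv[])
{
    QGuiApplication app(argc, argv);

    QOpenGLContext context;
    if (!context.create())
        return 1;

    QOffscreenSurface surface;
    surface.create();

    // As in the patch, glGetString() needs a current context.
    if (!context.makeCurrent(&surface))
        return 1;

    const GLubyte *renderer = context.functions()->glGetString(GL_RENDERER);
    const bool software =
        qstrcmp(reinterpret_cast<const char *>(renderer),
                "Apple Software Renderer") == 0;
    std::printf("software renderer: %s\n", software ? "yes" : "no");
    return 0;
}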
@@ -198,6 +198,7 @@ void QCocoaWindow::initialize()
         BOOL enable = qt_mac_resolveOption(YES, window(), "_q_mac_wantsBestResolutionOpenGLSurface",
                                            "QT_MAC_WANTS_BEST_RESOLUTION_OPENGL_SURFACE");
         [m_view setWantsBestResolutionOpenGLSurface:enable];
+        // See also QCocoaGLContext::makeCurrent for software renderer workarounds.
     }
     BOOL enable = qt_mac_resolveOption(NO, window(), "_q_mac_wantsLayer",
                                        "QT_MAC_WANTS_LAYER");
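The hunk above also shows the two knobs an application could use to opt out of high-resolution surfaces itself, via qt_mac_resolveOption(). A hypothetical usage sketch (only the option names come from the hunk; the rest is assumed):

// opt_out.cpp — hypothetical application-side opt-out. Both settings should
// be in place before the platform window is created (i.e. before show()).
#include <QGuiApplication>
#include <QWindow>

int main(int argc, char *argv[])
{
    // Process-wide: via the environment variable checked by the plugin.
    qputenv("QT_MAC_WANTS_BEST_RESOLUTION_OPENGL_SURFACE", "0");

    QGuiApplication app(argc, argv);

    // Per-window: via the dynamic property checked by the plugin.
    QWindow window;
    window.setProperty("_q_mac_wantsBestResolutionOpenGLSurface", false);
    window.show();

    return app.exec();
}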