I'm coding up screenshot capture and found this great topic for the Mac: How can I get screenshot from all displays on MAC?
I was wondering if anyone has an equivalent for the X11 library? That is, to get all the monitors and then take a screenshot of each of them?
I found this topic: https://stackoverflow.com/a/5293559/1828637
But the code linked from there is not easy for a newbie like me to follow.
Does RootWindow(3) give the combined area of all the monitors? Could I then look up each monitor's dimensions and use XGetImage on those sections of whatever RootWindow returns?
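Roughly what I have in mind is something like this untested sketch, assuming the Xinerama extension is available to report each monitor's rectangle inside the root window:

#include <cstdio>
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <X11/extensions/Xinerama.h>

// build with: g++ shot.cpp -lX11 -lXinerama
int main()
{
    Display* dpy = XOpenDisplay(nullptr);
    if (!dpy)
        return 1;

    Window root = DefaultRootWindow(dpy);

    // Xinerama reports one rectangle per physical monitor inside the big
    // virtual screen that the root window covers.
    int monitorCount = 0;
    XineramaScreenInfo* monitors = XineramaQueryScreens(dpy, &monitorCount);
    if (!monitors)
    {
        XCloseDisplay(dpy);
        return 1;
    }

    for (int i = 0; i < monitorCount; ++i)
    {
        // Grab just this monitor's rectangle out of the root window.
        XImage* img = XGetImage(dpy, root,
                                monitors[i].x_org, monitors[i].y_org,
                                monitors[i].width, monitors[i].height,
                                AllPlanes, ZPixmap);
        if (!img)
            continue;

        std::printf("monitor %d: %dx%d at (%d,%d), %d bits per pixel\n",
                    i, img->width, img->height,
                    monitors[i].x_org, monitors[i].y_org, img->bits_per_pixel);

        // img->data now holds the raw pixels for this monitor; encode/save it here.
        XDestroyImage(img);
    }

    XFree(monitors);
    XCloseDisplay(dpy);
    return 0;
}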
I also came across this topic: How do take a screenshot correctly with xlib? But I'm not sure whether it has multi-monitor support. I'm doing this from ctypes, so I can't easily test that code without first going through the sizable task of porting it. So I'd like to know whether it is correct, or how I would modify it to handle multiple monitors.
The poster there shared his code, which can be seen here: https://github.com/Lalaland/ScreenCap/blob/master/src/screenCapturerImpl.cpp#L96, but it's complicated and I don't understand it. It uses functions such as XFixesGetCursorImage that I can't find in the documentation, and I can't see how multiple monitors are handled there. The author of that topic also warned that he no longer remembers the code and that it may not work on modern Linux.
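From what I can tell, XFixesGetCursorImage comes from the XFixes extension (declared in <X11/extensions/Xfixes.h>) and only describes the mouse cursor, not the monitors. A rough, untested sketch of how it seems to be used:

#include <cstdio>
#include <X11/Xlib.h>
#include <X11/extensions/Xfixes.h>

// build with: g++ cursor.cpp -lX11 -lXfixes
int main()
{
    Display* dpy = XOpenDisplay(nullptr);
    if (!dpy)
        return 1;

    // Returns the position, hotspot and ARGB pixels of the current cursor.
    XFixesCursorImage* cursor = XFixesGetCursorImage(dpy);
    if (cursor)
    {
        std::printf("cursor %hux%hu at (%hd,%hd), hotspot (%hu,%hu)\n",
                    cursor->width, cursor->height, cursor->x, cursor->y,
                    cursor->xhot, cursor->yhot);
        // cursor->pixels holds width*height premultiplied-ARGB values, one per
        // 'unsigned long' entry, if you want to paste the cursor onto the capture.
        XFree(cursor);
    }

    XCloseDisplay(dpy);
    return 0;
}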
This is not a perfect answer to the question, but the following code could be modified into a quick version of the desired end result: https://github.com/Clodo76/vr-desktop-mirror/blob/master/DesktopCapture/main.cpp
The DesktopCapturePlugin_Initialize method converts all the displays into objects:
UNITY_INTERFACE_EXPORT void UNITY_INTERFACE_API DesktopCapturePlugin_Initialize()
{
    DesksClean();
    g_needReinit = 0;

    IDXGIFactory1* factory;
    CreateDXGIFactory1(__uuidof(IDXGIFactory1), reinterpret_cast<void**>(&factory));

    // Walk every adapter (GPU), and every output (monitor) attached to it.
    IDXGIAdapter1* adapter;
    for (int i = 0; (factory->EnumAdapters1(i, &adapter) != DXGI_ERROR_NOT_FOUND); ++i)
    {
        IDXGIOutput* output;
        for (int j = 0; (adapter->EnumOutputs(j, &output) != DXGI_ERROR_NOT_FOUND); j++)
        {
            DXGI_OUTPUT_DESC outputDesc;
            output->GetDesc(&outputDesc);

            MONITORINFOEX monitorInfo;
            monitorInfo.cbSize = sizeof(MONITORINFOEX);
            GetMonitorInfo(outputDesc.Monitor, &monitorInfo);

            // Maybe in future add a function to identify the primary monitor.
            //if (monitorInfo.dwFlags == MONITORINFOF_PRIMARY)
            {
                // Record this monitor's size and start duplicating its output.
                int iDesk = DeskAdd();
                g_desks[iDesk].g_width = monitorInfo.rcMonitor.right - monitorInfo.rcMonitor.left;
                g_desks[iDesk].g_height = monitorInfo.rcMonitor.bottom - monitorInfo.rcMonitor.top;

                auto device = g_unity->Get<IUnityGraphicsD3D11>()->GetDevice();

                IDXGIOutput1* output1;
                // (The quoted source casts directly; QueryInterface for IDXGIOutput1 is the safer COM route.)
                output1 = reinterpret_cast<IDXGIOutput1*>(output);
                output1->DuplicateOutput(device, &g_desks[iDesk].g_deskDupl);
            }

            output->Release();
        }
        adapter->Release();
    }
    factory->Release();
}
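Outside of Unity you would need to create your own D3D11 device before DuplicateOutput has anything to work with. A rough sketch of that piece, not taken from the linked project (the helper name CreateDeviceForAdapter is mine):

#include <d3d11.h>
#include <dxgi1_2.h>

// Hypothetical helper: creates the D3D11 device that IDXGIOutput1::DuplicateOutput
// needs when Unity isn't supplying one. Link against d3d11.lib and dxgi.lib.
static ID3D11Device* CreateDeviceForAdapter(IDXGIAdapter1* adapter)
{
    ID3D11Device* device = nullptr;
    ID3D11DeviceContext* context = nullptr;
    // With an explicit adapter the driver type must be D3D_DRIVER_TYPE_UNKNOWN.
    HRESULT hr = D3D11CreateDevice(adapter, D3D_DRIVER_TYPE_UNKNOWN, nullptr, 0,
                                   nullptr, 0, D3D11_SDK_VERSION,
                                   &device, nullptr, &context);
    if (FAILED(hr))
        return nullptr;
    context->Release(); // the immediate context can be re-fetched from the device later
    return device;
}

You would then hand that device to output1->DuplicateOutput in place of the one Unity provides.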
Then the OnRenderEvent method copies a frame from each display into a texture (provided by Unity in this case):
void UNITY_INTERFACE_API OnRenderEvent(int eventId)
{
    for (int iDesk = 0; iDesk < g_nDesks; iDesk++)
    {
        if (g_desks[iDesk].g_deskDupl == nullptr || g_desks[iDesk].g_texture == nullptr)
        {
            g_needReinit++;
            return;
        }

        // Try to grab the next frame for this desktop without blocking.
        IDXGIResource* resource = nullptr;
        const UINT timeout = 0; // ms
        HRESULT resultAcquire = g_desks[iDesk].g_deskDupl->AcquireNextFrame(timeout, &g_desks[iDesk].g_frameInfo, &resource);
        if (resultAcquire != S_OK)
        {
            g_needReinit++;
            return;
        }

        // Remember where the mouse pointer is on this desktop.
        g_desks[iDesk].g_isPointerVisible = (g_desks[iDesk].g_frameInfo.PointerPosition.Visible == TRUE);
        g_desks[iDesk].g_pointerX = g_desks[iDesk].g_frameInfo.PointerPosition.Position.x;
        g_desks[iDesk].g_pointerY = g_desks[iDesk].g_frameInfo.PointerPosition.Position.y;

        ID3D11Texture2D* texture;
        HRESULT resultQuery = resource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&texture));
        resource->Release();
        if (resultQuery != S_OK)
        {
            g_needReinit++;
            return;
        }

        // Copy the captured frame into the texture Unity handed us.
        ID3D11DeviceContext* context;
        auto device = g_unity->Get<IUnityGraphicsD3D11>()->GetDevice();
        device->GetImmediateContext(&context);
        context->CopyResource(g_desks[iDesk].g_texture, texture);

        // Release per-frame COM objects (not in the quoted excerpt; avoids leaking them every frame).
        texture->Release();
        context->Release();

        g_desks[iDesk].g_deskDupl->ReleaseFrame();
    }
    g_needReinit = 0;
}
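To end up with an actual screenshot rather than a GPU-side texture, the captured frame has to be copied into a staging texture and mapped so the CPU can read the pixels. A rough sketch of that step, again my own addition rather than code from the linked project (ReadFramePixels is a made-up helper):

#include <d3d11.h>
#include <cstring>
#include <vector>

// Copies a desktop-duplication frame into CPU memory as tightly packed 32-bit BGRA rows.
// 'device', 'context' and 'frame' would come from the capture code above.
static std::vector<unsigned char> ReadFramePixels(ID3D11Device* device,
                                                  ID3D11DeviceContext* context,
                                                  ID3D11Texture2D* frame)
{
    D3D11_TEXTURE2D_DESC desc = {};
    frame->GetDesc(&desc);

    // Staging textures are the only kind the CPU is allowed to Map.
    desc.Usage = D3D11_USAGE_STAGING;
    desc.BindFlags = 0;
    desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
    desc.MiscFlags = 0;

    ID3D11Texture2D* staging = nullptr;
    if (FAILED(device->CreateTexture2D(&desc, nullptr, &staging)))
        return {};

    context->CopyResource(staging, frame);

    std::vector<unsigned char> pixels;
    D3D11_MAPPED_SUBRESOURCE mapped = {};
    if (SUCCEEDED(context->Map(staging, 0, D3D11_MAP_READ, 0, &mapped)))
    {
        const UINT bytesPerRow = desc.Width * 4; // duplicated frames are 32-bit BGRA
        pixels.resize(static_cast<size_t>(bytesPerRow) * desc.Height);
        for (UINT y = 0; y < desc.Height; ++y)
        {
            // RowPitch can be wider than the visible row, so copy row by row.
            std::memcpy(&pixels[static_cast<size_t>(y) * bytesPerRow],
                        static_cast<const unsigned char*>(mapped.pData) + static_cast<size_t>(y) * mapped.RowPitch,
                        bytesPerRow);
        }
        context->Unmap(staging, 0);
    }

    staging->Release();
    return pixels;
}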