I want to use SharpDX 4.2.0 to display a byte[] (a JPEG) on a D3DImage in a WPF application.
As far as I know, these are the steps I should follow to do this.
Edit: I created two D3D11.Texture2D objects, one to write the pixels to and one to share with D3D9. It compiles, but nothing shows up on the screen. I hope someone can point out why it does not display the image.
Step 1. Load the Stream into a SharpDX.Direct3D11.Texture2D.
Step 2. Create a SharpDX.Direct3D11.Texture2D d3dTexture_Draw and write the pixels to it.
Step 3. Create a SharpDX.Direct3D11.Texture2D _finalTexture that can be shared with a Direct3D9.Texture.
Step 4. d11dContext.CopyResource(d3dTexture_Draw, _finalTexture);
Step 5. Share _finalTexture with a Direct3D9.Texture.
Step 6. Call D3DImage.SetBackBuffer with the surface of the Direct3D9.Texture.
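For steps 5 and 6, this is roughly how I understand the sharing part (a minimal sketch, not my actual code: using Direct3D9Ex with a DeviceEx and calling Flush() are assumptions on my part about what shared surfaces need, and names such as _finalTexture, d11dContext and d3dImage refer to the full listing further down):

// Sketch of steps 5-6: open the shared D3D11 texture on a D3D9Ex device and hand its surface to D3DImage.
// Assumption: the D3D11 texture was created with ResourceOptionFlags.Shared.
var d9context = new SharpDX.Direct3D9.Direct3DEx();
var d9device = new SharpDX.Direct3D9.DeviceEx(d9context,
    0,
    SharpDX.Direct3D9.DeviceType.Hardware,
    IntPtr.Zero,
    SharpDX.Direct3D9.CreateFlags.HardwareVertexProcessing | SharpDX.Direct3D9.CreateFlags.Multithreaded | SharpDX.Direct3D9.CreateFlags.FpuPreserve,
    new SharpDX.Direct3D9.PresentParameters()
    {
        Windowed = true,
        SwapEffect = SharpDX.Direct3D9.SwapEffect.Discard,
        DeviceWindowHandle = new WindowInteropHelper(this).Handle,
        PresentationInterval = SharpDX.Direct3D9.PresentInterval.Immediate,
    });

// Open the D3D11 texture on the D3D9 side through its DXGI shared handle.
IntPtr sharedHandle = _finalTexture.QueryInterface<SharpDX.DXGI.Resource>().SharedHandle;
var d9texture = new SharpDX.Direct3D9.Texture(d9device,
    _finalTexture.Description.Width,
    _finalTexture.Description.Height,
    1,
    SharpDX.Direct3D9.Usage.RenderTarget,
    SharpDX.Direct3D9.Format.A8R8G8B8,
    SharpDX.Direct3D9.Pool.Default,
    ref sharedHandle);

// Make sure the D3D11 copy has actually been submitted before WPF samples the surface.
d11dContext.Flush();

d3dImage.Lock();
d3dImage.SetBackBuffer(D3DResourceType.IDirect3DSurface9, d9texture.GetSurfaceLevel(0).NativePointer);
d3dImage.AddDirtyRect(new Int32Rect(0, 0, _finalTexture.Description.Width, _finalTexture.Description.Height));
d3dImage.Unlock();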
I would appreciate any help.
@Simon Mourier: I am trying your approach, but I have not finished it yet.
Thank you.
private void LoadBmpToTexture(string filePath)
{
    // Load the JPEG into a Bitmap
    using (System.Drawing.Bitmap bitmap = new System.Drawing.Bitmap(filePath))
    {
        SharpDX.Direct3D11.Device device = new SharpDX.Direct3D11.Device(DriverType.Hardware, DeviceCreationFlags.None | DeviceCreationFlags.BgraSupport, SharpDX.Direct3D.FeatureLevel.Level_11_0);
        SharpDX.Direct3D11.DeviceContext d11dContext = device.ImmediateContext;
        byte[] byteArray = ConvertBitmapToByteArray(bitmap);
        MemoryStream memoryStream = new MemoryStream(byteArray);
        using (var imagingFactory = new SharpDX.WIC.ImagingFactory2())
        {
            var decoder = new SharpDX.WIC.BitmapDecoder(imagingFactory, memoryStream, DecodeOptions.CacheOnLoad);
            var frame = decoder.GetFrame(0);
            var formatConverter = new FormatConverter(imagingFactory);
            formatConverter.Initialize(frame, SharpDX.WIC.PixelFormat.Format32bppPBGRA);
            var width = formatConverter.Size.Width;
            var height = formatConverter.Size.Height;

            // CPU-writable (dynamic) texture that receives the decoded pixels
            var textureDesc = new Texture2DDescription
            {
                Width = width,
                Height = height,
                MipLevels = 1,
                ArraySize = 1,
                Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
                SampleDescription = new SampleDescription(1, 0),
                Usage = ResourceUsage.Dynamic,
                BindFlags = BindFlags.ShaderResource,
                CpuAccessFlags = CpuAccessFlags.Write,
            };
            var d3dTexture_Draw = new Texture2D(device, textureDesc);

            // Copy data to Texture2D
            var dataBox = d11dContext.MapSubresource(d3dTexture_Draw, 0, MapMode.WriteDiscard, SharpDX.Direct3D11.MapFlags.None);
            using (var wicStream = new DataStream(dataBox.DataPointer, width * height * 4, true, true))
            {
                formatConverter.CopyPixels(width * 4, wicStream);
            }
            d11dContext.UnmapSubresource(d3dTexture_Draw, 0);

            // Shared, default-usage texture that is meant to be opened on the D3D9 side
            var final_descriptor = new Texture2DDescription()
            {
                Width = (int)img.Width,
                Height = (int)img.Height,
                ArraySize = 1,
                BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
                Usage = ResourceUsage.Default,
                CpuAccessFlags = CpuAccessFlags.None,
                Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
                MipLevels = 1,
                OptionFlags = ResourceOptionFlags.Shared,
                SampleDescription = new SampleDescription(1, 0)
            };
            Texture2D _finalTexture = new Texture2D(device, final_descriptor);
            d11dContext.CopyResource(d3dTexture_Draw, _finalTexture);

            SharpDX.Direct3D9.Direct3DEx d9context = new SharpDX.Direct3D9.Direct3DEx();
            SharpDX.Direct3D9.Device d9device = new SharpDX.Direct3D9.Device(d9context,
                0,
                DeviceType.Hardware,
                IntPtr.Zero,
                CreateFlags.HardwareVertexProcessing,
                new SharpDX.Direct3D9.PresentParameters()
                {
                    Windowed = true,
                    SwapEffect = SharpDX.Direct3D9.SwapEffect.Discard,
                    DeviceWindowHandle = new WindowInteropHelper(this).Handle,
                    PresentationInterval = PresentInterval.Immediate,
                });

            IntPtr renderTextureHandle = _finalTexture.QueryInterface<SharpDX.DXGI.Resource>().SharedHandle;
            SharpDX.Direct3D9.Texture d9texture = new SharpDX.Direct3D9.Texture(d9device,
                _finalTexture.Description.Width,
                _finalTexture.Description.Height,
                1,
                SharpDX.Direct3D9.Usage.RenderTarget,
                SharpDX.Direct3D9.Format.A8R8G8B8,
                Pool.Default,
                ref renderTextureHandle);

            d3dImage = new D3DImage();
            d3dImage.Lock();
            d3dImage.SetBackBuffer(D3DResourceType.IDirect3DSurface9, d9texture.GetSurfaceLevel(0).NativePointer, true);
            // Image d3dImage : Width=720, Height=370
            d3dImage.AddDirtyRect(new Int32Rect(0, 0, 720, 370));
            d3dImage.Unlock();
            img.Source = d3dImage;
        }
    }
}
Here is a solution that uses only WIC and DirectX 9:
public partial class MainWindow : Window
{
    private readonly D3DImage _d3dImage = new();
    // you can keep the device if you want to use it
    private readonly SharpDX.Direct3D9.Device _device;

    public MainWindow()
    {
        InitializeComponent();
        Loaded += (s, e) => LoadJpegToTexture(@"sample.bmp");

        var hwnd = new WindowInteropHelper(this).EnsureHandle();
        using var context = new SharpDX.Direct3D9.Direct3D();
        _device = new SharpDX.Direct3D9.Device(context,
            0,
            SharpDX.Direct3D9.DeviceType.Hardware,
            hwnd,
            SharpDX.Direct3D9.CreateFlags.HardwareVertexProcessing,
            new SharpDX.Direct3D9.PresentParameters()
            {
                Windowed = true,
                SwapEffect = SharpDX.Direct3D9.SwapEffect.Discard,
                DeviceWindowHandle = hwnd,
                PresentationInterval = SharpDX.Direct3D9.PresentInterval.Immediate,
            });
        img.Source = _d3dImage;
    }

    private void LoadJpegToTexture(string filePath)
    {
        using var imagingFactory = new SharpDX.WIC.ImagingFactory2();
        using var decoder = new SharpDX.WIC.BitmapDecoder(imagingFactory, filePath, SharpDX.WIC.DecodeOptions.CacheOnDemand);
        using var frame = decoder.GetFrame(0);

        // depending on the image's pixel format, you must convert to BGRA
        using var converter = new SharpDX.WIC.FormatConverter(imagingFactory);
        converter.Initialize(frame, SharpDX.WIC.PixelFormat.Format32bppBGRA);

        // create a render target surface
        using var surface = SharpDX.Direct3D9.Surface.CreateRenderTarget(_device,
            frame.Size.Width,
            frame.Size.Height,
            SharpDX.Direct3D9.Format.A8R8G8B8,
            SharpDX.Direct3D9.MultisampleType.None,
            0,
            true
            );

        // copy pixels from WIC to surface
        var rc = surface.LockRectangle(SharpDX.Direct3D9.LockFlags.None);
        converter.CopyPixels(rc.Pitch, rc.DataPointer, frame.Size.Height * rc.Pitch);
        surface.UnlockRectangle();

        _d3dImage.Lock();
        _d3dImage.SetBackBuffer(D3DResourceType.IDirect3DSurface9, surface.NativePointer, true);
        _d3dImage.AddDirtyRect(new Int32Rect(0, 0, frame.Size.Width, frame.Size.Height));
        _d3dImage.Unlock();
    }
}
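Note that this assumes the window's XAML contains an Image element named img (for example <Image x:Name="img" />), since the constructor assigns img.Source = _d3dImage.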