Reputation: 97
I'm working on a UWP application in Unity with SharpDX and FFmpegInteropX. My goal is to be able to draw the frames from FFmpeg onto a texture within Unity. Currently I can draw to the Unity texture, and I can capture the frames from FFmpeg, but I cannot figure out how to combine the two. Previously I copied the frames manually via Unity, but it was insanely slow because the data was being copied between the GPU and the CPU.
Here is how I have SharpDX and my texture set up:
// Class member variables
public UnityEngine.UI.RawImage target;
SharpDX.Direct3D11.Texture2D m_DstTexture;
SharpDX.Direct3D11.Device device;
SharpDX.Direct3D11.DeviceContext deviceContext;
// Dummy texture hack to get device and context from Unity
// NOTE(review): this snippet mixes field declarations with executable
// statements; the statements below must run inside a method (e.g. Start or
// Awake) to compile.
UnityEngine.Texture2D targetX = new UnityEngine.Texture2D(512, 512, TextureFormat.BGRA32, false);
// Native ID3D11Texture2D pointer of the dummy texture — lets SharpDX wrap
// the same D3D11 resource Unity created.
IntPtr texturePtr = targetX.GetNativeTexturePtr();
SharpDX.Direct3D11.Texture2D dstTextureX = new SharpDX.Direct3D11.Texture2D(texturePtr);
// Create DirectX device and context from texture
device = dstTextureX.Device;
deviceContext = device.ImmediateContext;
// Create shared texture for FFmpeg and Unity to access
// (copies the dummy texture's description, then marks it shareable so a
// second D3D11 device — e.g. the media pipeline's — can open it)
SharpDX.Direct3D11.Texture2DDescription sharedTexture2DDescription = dstTextureX.Description;
sharedTexture2DDescription.OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.Shared;
m_DstTexture = new SharpDX.Direct3D11.Texture2D(device, sharedTexture2DDescription);
// Get Shader Resource View to shared texture
// NOTE(review): d3d11ShaderResourceView is created but never used below.
var d3d11ShaderResourceView = new ShaderResourceView(device, m_DstTexture);
// Assign the shader resource view to the target RawImage
// NOTE(review): CreateExternalTexture is passed texturePtr — the DUMMY
// texture's pointer — not m_DstTexture.NativePointer. Presumably the shared
// texture was intended here; verify which surface the RawImage should show,
// as this looks like the reason the decoded frames never appear.
target.texture = UnityEngine.Texture2D.CreateExternalTexture(512, 512, TextureFormat.BGRA32, false, false, texturePtr);
FFmpeg setup:
// Member variables
// UWP media player driven in frame-server mode (see InitializeMediaPlayer).
private MediaPlayer mediaPlayer;
// Playback item wrapping the FFmpegInteropX media source.
private MediaPlaybackItem mediaPlaybackItem;
// FFmpegInteropX demuxer/decoder source created from a URI.
private FFmpegMediaSource ffmpegMediaSource;
// CPU-side bitmap scratch target; NOTE(review): currently unused by the
// frame handler shown below.
private SoftwareBitmap frameServerDest = new SoftwareBitmap(BitmapPixelFormat.Rgba8, 512, 512, BitmapAlphaMode.Premultiplied );
private UnityEngine.Texture2D tex;
// Win2D device; NOTE(review): never initialized in the code shown.
private CanvasDevice canvasDevice;
// Destination surface for CopyFrameToVideoSurface; NOTE(review): never
// assigned in the code shown — it must wrap the shared D3D11 texture
// (m_DstTexture) before any frame can be copied into it.
private IDirect3DSurface surface;
/// <summary>
/// Configures FFmpegInteropX, opens the sample stream, and starts playback
/// in frame-server mode so frames arrive via <see cref="MediaPlayer_VideoFrameAvailable"/>.
/// async void is tolerable here only because this is a top-level
/// fire-and-forget initializer; all exceptions are caught and logged below.
/// </summary>
private async void InitializeMediaPlayer()
{
    try
    {
        FFmpegInteropLogging.SetDefaultLogProvider();

        // Software decoding keeps frames in a predictable format instead of
        // vendor-specific hardware surfaces.
        MediaSourceConfig configuration = new MediaSourceConfig()
        {
            MaxVideoThreads = 8,
            SkipErrors = uint.MaxValue,
            ReadAheadBufferDuration = TimeSpan.Zero,
            FastSeek = true,
            VideoDecoderMode = VideoDecoderMode.ForceFFmpegSoftwareDecoder
        };
        Debug.Log("FFmpegInteropX configuration set up successfully.");

        // Sample stream source
        string uri = "https://test-videos.co.uk/vids/sintel/mp4/h264/720/Sintel_720_10s_1MB.mp4";
        //string uri = "udp://@192.168.10.1:11111";

        // Create FFmpegMediaSource from sample stream
        Debug.Log($"Attempting to create media source from URI: {uri}");
        ffmpegMediaSource = await FFmpegMediaSource.CreateFromUriAsync(uri, configuration);

        // Create MediaPlaybackItem from FFmpegMediaSource
        mediaPlaybackItem = ffmpegMediaSource.CreateMediaPlaybackItem();

        // Frame-server mode: the player raises VideoFrameAvailable for each
        // decoded frame instead of presenting to its own swap chain.
        mediaPlayer = new MediaPlayer
        {
            Source = mediaPlaybackItem,
            IsVideoFrameServerEnabled = true
        };
        mediaPlayer.VideoFrameAvailable += MediaPlayer_VideoFrameAvailable;
        mediaPlayer.Play();
    }
    catch (Exception ex)
    {
        // Log the full exception, not just ex.Message: for awaited failures
        // the stack trace and inner exceptions are the only useful context.
        Debug.LogError($"Exception while setting up FFmpegInteropX: {ex}");
    }
}
Event handler for incoming frames; this is where the conversion should happen:
/// <summary>
/// Raised on a media-pipeline worker thread for every decoded frame.
/// Copies the frame GPU-to-GPU into the shared Direct3D surface.
/// Not async: the original was 'async void' with no await, which only adds
/// a pointless state machine (compiler warning CS1998) and makes any
/// exception unobservable by the caller.
/// </summary>
private void MediaPlayer_VideoFrameAvailable(MediaPlayer sender, object args)
{
    try
    {
        // 'surface' is never assigned in the setup code shown, so the copy
        // would throw on every single frame. Fail loudly once per frame with
        // a precise message instead of a generic exception: 'surface' must
        // wrap the shared texture (m_DstTexture) before playback starts.
        if (surface == null)
        {
            Debug.LogError("VideoFrameAvailable: destination surface is not initialized; create an IDirect3DSurface for the shared texture before Play().");
            return;
        }

        // MediaPlayer serves frames as IDirect3DSurface; this copy stays
        // entirely on the GPU — no CPU round trip.
        sender.CopyFrameToVideoSurface(surface);
    }
    catch (Exception ex)
    {
        // Keep the stack trace: log the full exception object.
        Debug.LogError($"Exception during VideoFrameAvailable: {ex}");
    }
}
Upvotes: 1
Views: 69