Rick James

Reputation: 1

Rendering a Texture from Unity Render Streaming onto a Sphere

I'm working on a Unity project where I'm using an edited version of the WebApp (Bidirectional sample) to stream a 360 video file to Unity via WebRTC. On the Unity side, I'm using the ReceiverSample.cs script to receive the video stream. However, even after following guides and with the help of ChatGPT, I was not able to figure out how to render the received texture on a sphere instead of on a canvas. As of now, I can successfully stream from the WebApp to Unity, but I specifically want to render the video on a sphere (a world-like environment) so it can be watched with a VR headset.

ReceiverSample.cs

using UnityEngine;
using UnityEngine.UI;

namespace Unity.RenderStreaming
{
    public class ReceiverSample : MonoBehaviour
    {
#pragma warning disable 0649
        [SerializeField] private Button startButton;
        [SerializeField] private Button stopButton;
        [SerializeField] private InputField connectionIdInput;
        [SerializeField] private RawImage remoteVideoImage;
        [SerializeField] private ReceiveVideoViewer receiveVideoViewer;
        [SerializeField] private SingleConnection connection;
#pragma warning restore 0649

        private string connectionId;

        void Awake()
        {
            startButton.onClick.AddListener(OnStart);
            stopButton.onClick.AddListener(OnStop);
            if (connectionIdInput != null)
                connectionIdInput.onValueChanged.AddListener(input => connectionId = input);
            // Each time a decoded frame arrives, assign it to the RawImage on the Canvas.
            receiveVideoViewer.OnUpdateReceiveTexture += texture => remoteVideoImage.texture = texture;
        }

        private void OnStart()
        {
            if (string.IsNullOrEmpty(connectionId))
            {
                connectionId = System.Guid.NewGuid().ToString("N");
                connectionIdInput.text = connectionId;
            }
            connectionIdInput.interactable = false;

            connection.CreateConnection(connectionId, true);

            startButton.gameObject.SetActive(false);
            stopButton.gameObject.SetActive(true);
        }

        private void OnStop()
        {
            connection.DeleteConnection(connectionId);
            connectionId = string.Empty;
            connectionIdInput.text = string.Empty;
            connectionIdInput.interactable = true;
            startButton.gameObject.SetActive(true);
            stopButton.gameObject.SetActive(false);
        }
    }
}
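
The line I believe needs to change is the OnUpdateReceiveTexture subscription in Awake(). Below is a minimal, untested sketch of the change I expected to work, where sphereRenderer is a hypothetical serialized Renderer field pointing at the sphere, replacing remoteVideoImage:

        // Sketch only, not the shipped sample code: route the received texture
        // to the sphere's material instead of the Canvas RawImage.
        // "sphereRenderer" is a hypothetical field assigned in the Inspector.
        [SerializeField] private Renderer sphereRenderer;

        void Awake()
        {
            // mainTexture writes the material's _MainTex property, so each
            // incoming video frame replaces the sphere's base texture.
            receiveVideoViewer.OnUpdateReceiveTexture +=
                texture => sphereRenderer.material.mainTexture = texture;
        }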

sendvideo.js:

export class SendVideo {
  constructor(localVideoElement, remoteVideoElement) {
    this.localVideo = localVideoElement;
    this.remoteVideo = remoteVideoElement;
    this.peerConnection = null; // WebRTC PeerConnection
  }

  async startLocalVideo() {
    try {
      const videoElement = document.createElement('video');
      videoElement.src = '/videos/video.mp4'; // Path to your video file
      videoElement.muted = true;
      await videoElement.play();

      const stream = videoElement.captureStream();
      this.localVideo.srcObject = stream;
      await this.localVideo.play();

      // Initialize WebRTC connection for video streaming
      this.initializePeerConnection(stream);
    } catch (err) {
      console.error(`Error starting local video: ${err}`);
    }
  }

  // Set up WebRTC connection and send the video stream to Unity
  initializePeerConnection(stream) {
    this.peerConnection = new RTCPeerConnection();

    // Add video tracks to the connection
    stream.getTracks().forEach(track => {
      this.peerConnection.addTrack(track, stream);
    });

    // Event when connection is established
    this.peerConnection.onconnectionstatechange = () => {
      if (this.peerConnection.connectionState === 'connected') {
        console.log("WebRTC connection established with Unity");
      }
    };

    // Offer to connect to Unity
    this.peerConnection.createOffer().then(offer => {
      return this.peerConnection.setLocalDescription(offer);
    }).then(() => {
      // Send the offer to the Unity client via signaling server
      console.log("Offer created and sent to Unity client");
    }).catch(err => console.error(err));
  }

  addRemoteTrack(track) {
    if (this.remoteVideo.srcObject == null) {
      this.remoteVideo.srcObject = new MediaStream();
    }
    this.remoteVideo.srcObject.addTrack(track);
  }
}

Above is a minimal reproducible version of the code I am using from the WebApp (bidirectional sample) and from Unity (ReceiverSample) to stream the video.

[Screenshot: the Canvas that currently renders the video.]

I would like to know the easiest way to render the video on a sphere, the way one would watch a 360 video through a VR headset.

I tried creating a RenderTexture and a Material, dragging the render texture into the material, and dragging the material onto the sphere, but the canvas always takes over the view and I cannot see the video. A black sphere remains as a layer between the sphere I added and the video, and I wasn't able to fix it.
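
From what I understand, the black layer is likely back-face culling: Unity's default shaders draw only the outside of the sphere, so a camera placed at its center sees nothing. A common workaround I came across is to flip the sphere mesh inside-out at startup; below is a minimal, untested sketch of that idea (the InvertSphereNormals component is my own naming):

using UnityEngine;

// Sketch: turns the sphere mesh inside-out at startup so its material
// (and therefore the streamed video) is visible from a camera placed
// at the sphere's center.
[RequireComponent(typeof(MeshFilter))]
public class InvertSphereNormals : MonoBehaviour
{
    void Start()
    {
        // MeshFilter.mesh returns a per-instance copy, so the shared
        // sphere asset is left untouched.
        var mesh = GetComponent<MeshFilter>().mesh;

        // Point all normals toward the inside of the sphere.
        var normals = mesh.normals;
        for (int i = 0; i < normals.Length; i++)
            normals[i] = -normals[i];
        mesh.normals = normals;

        // Reverse triangle winding so back-face culling keeps the
        // inside faces instead of the outside ones.
        for (int sub = 0; sub < mesh.subMeshCount; sub++)
        {
            var triangles = mesh.GetTriangles(sub);
            for (int i = 0; i < triangles.Length; i += 3)
            {
                int tmp = triangles[i];
                triangles[i] = triangles[i + 1];
                triangles[i + 1] = tmp;
            }
            mesh.SetTriangles(triangles, sub);
        }
    }
}

I also suspect the Canvas needs to be disabled (or the RawImage removed) so it stops covering the view, and that an equirectangular 360 video may come out mirrored, which I've seen fixed with a negative X tiling on the material.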

Upvotes: 0

Views: 40

Answers (0)
