We assume you have already deployed your backend. If not, follow the deployment tutorial to deploy your backend with outspeed-client before continuing.
import React from "react";
import { useWebRTC, RealtimeVideo } from "@outspeed/react";
import { createConfig } from "@outspeed/core";

export default function App() {
  const {
    connect,
    connectionStatus,
    getRemoteVideoTrack,
    getLocalVideoTrack,
  } = useWebRTC({
    config: createConfig({
      // Add your function URL.
      functionURL: "<my-function-url>",
      audioConstraints: {},
      videoConstraints: {},
    }),
  });

  return (
    <div>
      <span>Connection Status: {connectionStatus}</span>
      {connectionStatus === "SetupCompleted" && (
        <button onClick={connect}>Connect</button>
      )}
      {/* To show remote video stream */}
      <RealtimeVideo track={getRemoteVideoTrack()} />
      {/* To show local video stream */}
      <RealtimeVideo track={getLocalVideoTrack()} />
    </div>
  );
}

The code above establishes a peer connection with the backend and streams the local audio and video to it. If the backend is configured to send media back, the remote video stream is rendered by the RealtimeVideo component as well; a sketch for playing the remote audio follows at the end of this section.

  • We import the necessary components from the @outspeed/react and @outspeed/core libraries.
  • useWebRTC: This hook manages the WebRTC connection. It provides methods such as connect, getRemoteVideoTrack, and getLocalVideoTrack; see the useWebRTC reference to learn more.
  • The useWebRTC hook is configured with a function URL (functionURL) and options that enable audio and video. You can also create the config with the useCreateConfig hook; see its reference for details.
    • audioConstraints: {}: This selects the default audio input device and streams its audio to the backend. You can pass any MediaTrackConstraints to customize the audio input, as shown in the sketch after this list.
    • videoConstraints: {}: This selects the default video input device and streams its video to the backend. As with audio, you can pass MediaTrackConstraints to choose specific video input settings.
  • Video streams: The RealtimeVideo component renders a video track; see the RealtimeVideo reference to learn more.

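The example above renders only video. If your backend also returns audio, you will typically want to play the remote audio track as well. Below is a minimal sketch assuming the SDK exposes a RealtimeAudio component and a getRemoteAudioTrack() method analogous to the video APIs used above; check the @outspeed/react reference for the exact names.

import React from "react";
// RealtimeAudio is assumed here by analogy with RealtimeVideo.
import { useWebRTC, RealtimeVideo, RealtimeAudio } from "@outspeed/react";
import { createConfig } from "@outspeed/core";

export default function App() {
  const {
    connect,
    connectionStatus,
    getRemoteVideoTrack,
    getRemoteAudioTrack, // assumed counterpart of getRemoteVideoTrack
  } = useWebRTC({
    config: createConfig({
      functionURL: "<my-function-url>",
      audioConstraints: {},
      videoConstraints: {},
    }),
  });

  return (
    <div>
      <span>Connection Status: {connectionStatus}</span>
      {connectionStatus === "SetupCompleted" && (
        <button onClick={connect}>Connect</button>
      )}
      {/* Remote video, as in the example above */}
      <RealtimeVideo track={getRemoteVideoTrack()} />
      {/* Remote audio playback (assumed component and method names) */}
      <RealtimeAudio track={getRemoteAudioTrack()} />
    </div>
  );
}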
Resources