WebRTC can also share the screen. The getDisplayMedia method asks the user for permission; once the user agrees to share, we get a video stream of the chosen screen. Combined with MediaRecorder and Blob, that stream's data can be stored and the screen recording saved as a video.
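Here is a condensed sketch of that flow, assuming an async context and a <video> element already on the page; the rest of the article builds it out step by step.

// Condensed overview (sketch): capture, record, package
const stream = await navigator.mediaDevices.getDisplayMedia({ video: true });
document.querySelector('video').srcObject = stream; // preview the shared screen

const chunks = [];
const recorder = new MediaRecorder(stream);
recorder.ondataavailable = (e) => chunks.push(e.data); // collect recorded data
recorder.onstop = () => {
  const blob = new Blob(chunks, { type: 'video/webm' }); // ready to download
};
recorder.start();
// ... later, recorder.stop() ends the recording and fires onstop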

html

As usual, let’s put some elements on the interface

<div id="container">
    <h3>WebRTC capture screen example getDisplayMedia</h3>

    <video id="gum-local" autoplay playsinline muted></video>
    <button id="startBtn" disabled>To preview</button>
    <button id="recordBtn" disabled>Start recording</button>
    <button id="downloadBtn" disabled>download</button>

    <div id="msg"></div>
</div>

<!-- Use local adapter -->
<script src="../js/adapter-latest.js" async></script>
<script src="js/main.js"></script>

Because my network connection is not great, I downloaded the adapter file and use it locally. If you want to use the official adapter instead, import it from the following address:

<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
  • The video element is used to preview the shared screen; once sharing starts, we hand it the video stream
  • A few buttons handle the interaction
  • div#msg is used to display messages

control

Write our control logic in main.js

Let’s get the elements

'use strict';

const startBtn = document.getElementById('startBtn');
const recordBtn = document.getElementById('recordBtn');
const downloadBtn = document.getElementById('downloadBtn');
const video = document.querySelector('video'); // For preview purposes

let mediaRecorder;
let isRecording = false;
let recordedBlobs = []; // Where the video data is temporarily stored

Enabling screen sharing

The key method is getDisplayMedia; we only request video here (video: true).

startBtn.addEventListener('click', () => {
  navigator.mediaDevices.getDisplayMedia({video: true})
      .then(gotDisplayStream, onErr);
});

// Get the screen data stream
function gotDisplayStream(stream) {
  startBtn.disabled = true;
  video.srcObject = stream; // display
  window.stream = stream;   // cache it

  stream.getVideoTracks()[0].addEventListener('ended', () => {
    showMsg('User stopped sharing screen');
    startBtn.disabled = false;
    recordBtn.disabled = true;
  });
  recordBtn.disabled = false;
}

function onErr(error) {
  showMsg(`getDisplayMedia on err: ${error.name}`, error);
}

function showMsg(msg, error) {
  const msgEle = document.querySelector('#msg');
  msgEle.innerHTML += `<p>${msg}</p>`;
  if (typeof error !== 'undefined') {
    console.error(error);
  }
}

After getting the video stream, we hand it to the video element for display. We also add an 'ended' listener to the video track so we are notified when the user stops sharing.

At this point, you can already test screen sharing on its own (comment out the recording-related elements if you like). Chrome and Edge ask the user whether to share the screen and let them choose which screen, window, or tab to share. On macOS, the user may also need to grant the browser screen-recording permission in the system privacy settings. Once you agree, you can see the shared screen in the preview.
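getDisplayMedia also accepts extra constraints if plain video is not enough; a sketch (audio capture is only honored on some platforms, e.g. tab audio in Chrome):

// Sketch: also ask for audio and hint at a frame rate; unsupported hints are simply ignored
navigator.mediaDevices.getDisplayMedia({
  video: { frameRate: { ideal: 30 } },
  audio: true, // system/tab audio where the browser supports it
})
    .then(gotDisplayStream, onErr);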

Note: screens cannot be shared in Chrome on mobile.
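Since support varies, it is worth feature-detecting before enabling the preview button; a minimal sketch:

// Only enable the preview button when getDisplayMedia is available
if (navigator.mediaDevices && 'getDisplayMedia' in navigator.mediaDevices) {
  startBtn.disabled = false;
} else {
  showMsg('getDisplayMedia is not supported in this browser');
}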

Record the screen

We now have the video stream, and its data can be saved the same way as in the previous recording article.

Start by finding out which video formats your browser supports. To simplify operations, we will use only the first supported format.

// Find the supported format
function getSupportedMimeTypes() {
  const possibleTypes = [
    'video/webm;codecs=vp9,opus',
    'video/webm;codecs=vp8,opus',
    'video/webm;codecs=h264,opus',
    'video/mp4;codecs=h264,aac',
  ];
  return possibleTypes.filter(mimeType => {
    return MediaRecorder.isTypeSupported(mimeType);
  });
}

Start recording

Push the recorded data chunks into recordedBlobs.

Of course, this is just a demo; in a real application, holding all of the recording data in memory like this is not ideal.

function startRecording() {
  recordedBlobs = [];
  const mimeType = getSupportedMimeTypes()[0];
  const options = { mimeType };

  try {
    mediaRecorder = new MediaRecorder(window.stream, options);
  } catch (e) {
    showMsg(`Creating MediaRecorder error: ${JSON.stringify(e)}`);
    return;
  }
  recordBtn.textContent = 'Stop recording';
  isRecording = true;
  downloadBtn.disabled = true;
  mediaRecorder.onstop = (event) => {
    showMsg('Recording stopped:' + event);
  };
  mediaRecorder.ondataavailable = handleDataAvailable;
  mediaRecorder.start();
  showMsg('Start recording mediaRecorder:' + mediaRecorder);
}

function handleDataAvailable(event) {
  console.log('handleDataAvailable', event);
  if (event.data && event.data.size > 0) {
    recordedBlobs.push(event.data);
  }
}
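If keeping everything in memory is a concern (as noted above), MediaRecorder.start() also accepts a timeslice in milliseconds, so ondataavailable fires periodically and each chunk can be written out (for example to IndexedDB or a server) as it arrives; a sketch of that variant:

// Sketch: emit a chunk roughly every second instead of one big blob at stop time
mediaRecorder.ondataavailable = (event) => {
  if (event.data && event.data.size > 0) {
    // persist or upload event.data here instead of only buffering it
    recordedBlobs.push(event.data);
  }
};
mediaRecorder.start(1000); // timeslice in milliseconds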

Stop recording

The key call is mediaRecorder.stop():

function stopRecord() {
  isRecording = false;
  mediaRecorder.stop();
  downloadBtn.disabled = false;
  recordBtn.textContent = "Start recording.";
}
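The snippets above don't show how recordBtn switches between these two functions; a minimal sketch using the isRecording flag declared earlier:

// Toggle between starting and stopping the recording
recordBtn.addEventListener('click', () => {
  if (!isRecording) {
    startRecording();
  } else {
    stopRecord();
  }
});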

download

Package up the data in recordedBlobs and download it

downloadBtn.addEventListener('click', () => {
  const blob = new Blob(recordedBlobs, { type: 'video/webm' });
  const url = window.URL.createObjectURL(blob);
  const a = document.createElement('a');
  a.style.display = 'none';
  a.href = url;
  a.download = 'record-screen_' + new Date().getTime() + '.webm';
  document.body.appendChild(a);
  a.click();
  setTimeout(() => {
    document.body.removeChild(a);
    window.URL.revokeObjectURL(url);
  }, 100);
});
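One small refinement: instead of hard-coding 'video/webm', the Blob can be built from the recorder's actual MIME type so the file matches whatever format was picked earlier; a sketch:

// Sketch: reuse the format the recorder actually used, falling back to webm
const mimeType = (mediaRecorder && mediaRecorder.mimeType) || 'video/webm';
const blob = new Blob(recordedBlobs, { type: mimeType });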

summary

We used WebRTC's getDisplayMedia method to share the screen and, combined with the download technique from the previous article, built a simple screen-recording-and-download demo.

The effect

Preview of the running page

Link: WebRTC local screen share and screen record demo