Git address: webrtc-demo

Baidu Cloud disk share: "Introduction to Audio/Video WebRTC Real-Time Interactive Live-Streaming Technology, with Practice". Link: pan.baidu.com/s/1NEJgDF49… Extraction code: VI43

Run `npm install`, then start with `node app.js` (default port: https://localhost:3000). Optionally install nodemon globally and start with `nodemon app.js` for hot reload. HTTPS is required — otherwise the browser will deny local camera access. In the browser, open: https://localhost:3000/ (audio/video device information), https://localhost:3000/room (complete project: connecting the local stream to the remote stream), https://localhost:3000/mediastream (media stream demo).

WebRTC provides three core apis:

  • GetUserMedia: You can get a local media stream that contains several tracks, such as video and audio tracks.
  • RTCPeerConnection: Used to establish P2P connections and transmit multimedia data.
  • RTCDataChannel: Establishes a two-way communication data channel that can pass multiple data types.

If peer A wants to establish A WebRTC connection with peer B, perform the following operations:

  • Peer A uses ICE to generate its ICE candidates. In most cases, this requires a Session Traversal Utilities for NAT (STUN) server or a Traversal Using Relays around NAT (TURN) server.
  • Peer A binds the ICE candidate and session description to an object. This object is stored as A local description (the connection information of the peer) in peer A and sent to peer B through the signaling mechanism. This part is called an offer.
  • Peer B receives the proposal and stores it as a remote description (connection information for the peer on the other end) for further use. Peer B generates its own ICE candidate and session descriptions, stores them as local descriptions, and sends them to peer A via A signaling mechanism. This part is called the answer. (Note: As mentioned earlier, ICE candidates in steps 2 and 3 can also be sent separately.)
  • Peer A receives the answer from peer B and stores it as A remote description.
  • In this way, both peers have connection information for each other and can successfully start communicating via WebRTC!
// Get the audio and video device and initialize the local stream.
// Reads the selected camera from the device <select> (viodeSource),
// captures with getUserMedia, then chains into gotMediaStream/gotDevices.
function start() {
    // Feature-detect getUserMedia; bail out on unsupported browsers.
    if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
        console.log("Not supported");
        return;
    } else {
        var deviceId = viodeSource.value; // currently selected video input device
        // Audio and video capture constraints
        var constraints = {
            video: {
                width: 640,
                height: 480,
                frameRate: 15, // frame rate (source was garbled "15.30" — possibly 15–30 fps intended; TODO confirm)
                // facingMode: "environment", // rear-facing camera on mobile
                deviceId: deviceId ? deviceId : undefined // video device ID; after setting, you can switch front/rear camera on a phone
            },
            audio: {
                noiseSuppression: true, // noise reduction
                echoCancellation: true  // echo cancellation
            }
        };
        // Audio-only variant:
        // var constraints = {
        //     video: false,
        //     audio: true
        // };
        navigator.mediaDevices.getUserMedia(constraints)
            .then(gotMediaStream)
            .then(gotDevices)
            .catch(handleError);
    }
}

// getUserMedia success callback: renders the captured stream in the page's
// <video> element, records the video track's effective settings, and chains
// into device enumeration (the returned Promise feeds the next .then).
function gotMediaStream(stream) {
    console.log(stream);
    videoplay.srcObject = stream; // attach stream to the <video> element
    window.stream = stream;       // keep a global handle for later use

    // Read back the settings the video track actually got and show them.
    var track = stream.getVideoTracks()[0];
    var appliedSettings = track.getSettings();
    constraints.textContent = JSON.stringify(appliedSettings); // "constraints" here is a DOM element

    // audioplayer.srcObject = stream; // audio-only variant
    // enumerateDevices() returns a Promise consumed by the following .then.
    return navigator.mediaDevices.enumerateDevices();
}

Copy the code

Establishing a Signaling Server

Use Node + KOA2 to set up signaling server and import socket. IO

npm i socket.io --save

// Server-side code: HTTPS static-file server (Koa) plus a Socket.IO
// signaling endpoint. HTTPS is needed so browsers allow camera access
// from hosts other than localhost.
const Koa = require('koa');
const app = new Koa();
const staticFiles = require('koa-static');
const path = require("path");
// const http = require("http");
const https = require("https");
const fs = require("fs");
const socketIo = require('socket.io');
const log4js = require("log4js");

let logger = log4js.getLogger();
logger.level = "debug";

// Serve the front-end assets from ./public.
app.use(staticFiles(path.resolve(__dirname, "public")));

// Self-signed certificate files generated for local development.
const options = {
    key: fs.readFileSync("./server.key", "utf8"),
    cert: fs.readFileSync("./server.cert", "utf8")
};
let server = https.createServer(options, app.callback());
// const server = http.createServer(app.callback());

server.listen(3000, () => {
    console.log(`start localhost:${3000}`);
});

// Attach Socket.IO to the HTTPS server for signaling.
const io = socketIo(server);
// Signaling: relay join/leave/message events between peers in a room.
// The server never inspects SDP or ICE payloads; it only forwards them.
io.on('connection', (socket) => {
    socket.on("join", (room) => {
        socket.join(room);

        // In socket.io v3+ rooms is a Map (rooms.get(room)); in v2 it was
        // an object indexed as rooms[room].
        var myRoom = io.sockets.adapter.rooms.get(room);
        // Number of users currently in the room (the joiner is already counted).
        var users = myRoom ? myRoom.size : 0;
        logger.debug('-- Number of room users --', users, 'room', room);

        if (users < 9) { // room capacity limit — original comment said "less than three"; TODO confirm intended limit
            // Confirm the join to the joining socket.
            socket.emit("joined", room, socket.id);
            if (users > 1) {
                // Notify everyone else in the room so they can start negotiation.
                socket.to(room).emit("otherjoin", room, socket.id);
            }
        } else {
            // Room is full: remove the joiner and tell it so.
            socket.leave(room);
            socket.emit("full", room, socket.id);
        }
        // Messaging patterns, for reference:
        // socket.emit(...)            -> only this socket
        // socket.broadcast.emit(...)  -> everyone on this server except this socket
        // socket.to(room).emit(...)   -> everyone in the room except this socket
        // io.in(room).emit(...)       -> everyone in the room, including this socket
    });

    socket.on("leave", (room) => {
        var myRoom = io.sockets.adapter.rooms.get(room);
        console.log("myRoom", myRoom);
        var users = myRoom ? myRoom.size : 0;
        logger.debug('-- Number of room users leaving room --', users - 1);
        // Tell the other peers this one is gone, then confirm to the leaver.
        socket.to(room).emit('bye', room, socket.id);
        socket.emit("leaved", room, socket.id);
        // socket.leave(room);
    });

    // End-to-end signaling relay: forward offers/answers/candidates to the
    // other peers in the room.
    socket.on("message", (room, data) => {
        console.log("message", room);
        socket.to(room).emit("message", room, data);
    });
});

Copy the code

The front-end code

Include the client library in the page: <script src="../js/socket.io.min.js"></script>


// Client code
// Socket.IO connection: registers handlers for all signaling events and
// joins a (hard-coded) room. Relies on globals defined elsewhere in the
// page: socket, state, pc, btnConn, btnLeave, createPeerConnecion, call,
// closePeerConnection, getAnswer.
function coon() {
  console.log("coon");
  socket = io.connect();

  // We successfully joined the room.
  socket.on("joined", (roomid, id) => {
    console.log("Received - joined", roomid, id);
    state = "joined";
    createPeerConnecion();
    btnConn.disabled = true;
    btnLeave.disabled = false;
    console.log("recevie--joined-state--", state);
  });

  // Another peer joined: start media negotiation.
  socket.on("otherjoin", (roomid, id) => {
    console.log("-otherjoin-", roomid, id);
    if (state === "joined_unbind") {
      createPeerConnecion(); // re-create the connection after a previous teardown
    }
    state = "joined_conn";
    call(); // kick off media negotiation (create and send the offer)
    console.log("recevie--otherjoin-state--", state);
  });

  // The room is full; disconnect and allow retrying after someone leaves.
  socket.on("full", (roomid, id) => {
    console.log("-full-", roomid, id);
    state = "leaved";
    console.log("recevie--full-state--", state);
    socket.disconnect();
    alert("The room is full");
    btnConn.disabled = false;
    btnLeave.disabled = true;
  });

  // We left the room.
  socket.on("leaved", (roomid, id) => {
    console.log("-leaved-", roomid, id);
    state = "leaved";
    socket.disconnect();
    btnConn.disabled = false;
    btnLeave.disabled = true;
    console.log("recevie---leaved-state--", state);
  });

  // The remote peer left: tear down the peer connection, keep the socket.
  socket.on("bye", (roomid, id) => {
    console.log("-bye-", roomid, id);
    state = "joined_unbind"; // unbound state
    console.log("bye-state--", state);
    closePeerConnection();
  });

  // End-to-end signaling messages relayed by the server; the server does
  // not interpret them — the client handles offer/answer/candidate here.
  socket.on("message", (roomid, data) => {
    console.log("Received message from client", roomid, data);
    if (data) {
      if (data.type === 'offer') {
        // Offer received: the remote peer connection already exists there;
        // set the remote description and answer.
        pc.setRemoteDescription(new RTCSessionDescription(data));
        pc.createAnswer()
          .then(getAnswer)
          .catch(err => { console.log("Failed to create err getAnswer"); });
      } else if (data.type === 'answer') {
        pc.setRemoteDescription(new RTCSessionDescription(data));
      } else if (data.type === 'candidate') {
        var candidate = new RTCIceCandidate({
          sdpMLineIndex: data.label,
          candidate: data.candidate
        });
        // Add the remote ICE candidate to the local peer connection.
        pc.addIceCandidate(candidate)
          .then(() => {
            console.log('Successed to add ice candidate');
          })
          .catch(err => {
            console.error(err);
          });
      } else {
        console.error("the message is invalid!", data);
      }
    }
  });

  // Join a hard-coded room.
  socket.emit("join", "111111");
  return;
}
Copy the code

Create a RTCPeerConnection

In WebRTC, we establish point-to-point connections between communication parties through RTCPeerConnection. This interface provides the implementation of methods to create, hold, monitor and close connections. To establish the connection, we need a signaling server for exchanging various metadata (signaling) when establishing communication between browsers. At the same time, the STUN or TURN server is required to complete NAT traversal. The establishment of a connection consists of two parts: signaling exchange and setting up ICE candidates.

Common open source RTCPeerConnection turn | stun server

//stun:stun.l.google.com:19302
// stun.xten.com   
// stun.voipbuster.com  
// stun.sipgate.net  
// stun.ekiga.net
// stun.ideasip.com
// stun.schlund.de
// stun.voiparound.com
// stun.voipbuster.com
// stun.voipstunt.com
// stun.counterpath.com
// stun.1und1.de
// stun.gmx.net
// stun.callwithus.com
// stun.counterpath.net
// stun.internetcalls.com
// numb.viagenie.ca
Copy the code

Create a peer

 let iceServer = {
        "iceServers": [
          
          / / {
          // "url": "stun:stun.l.google.com:19302"
          // }].sdpSemantics: 'plan-b'.// sdpSemantics: 'unified-plan',
        // bundlePolicy: 'max-bundle',
        // iceCandidatePoolSize: 0
      };
      / / create
      // PeerConnection for compatible browsers
      let PeerConnection = (window.PeerConnection ||
        window.webkitPeerConnection00 ||
        window.webkitRTCPeerConnection ||
        window.mozRTCPeerConnection)
      / / create
      var peer = new RTCPeerConnection(iceServer);
Copy the code

Google debugging

Chrome debugging: open chrome://webrtc-internals in the address bar.

Echo cancellation and noise reduction

NoiseSuppression works perfectly in Firefox, and when the constraint is turned off I can hear the microphone very clearly

References:

  • xirsys.com/developers/
  • www.yuque.com/wangdd/open…

Common video conferencing and live streaming architectures

  • www.cnblogs.com/yjmyzz/p/we…

Vue use socket www.imooc.com/article/289…